summary | refs | log | tree | commit | diff
path: root/src/ext/csiphash.c
diff options
context:
space:
mode:
Diffstat (limited to 'src/ext/csiphash.c')
-rw-r--r--  src/ext/csiphash.c  9
1 file changed, 9 insertions(+), 0 deletions(-)
diff --git a/src/ext/csiphash.c b/src/ext/csiphash.c
index 4f58c4a141..c247886038 100644
--- a/src/ext/csiphash.c
+++ b/src/ext/csiphash.c
@@ -46,6 +46,10 @@
#elif defined(__APPLE__)
# include <libkern/OSByteOrder.h>
# define _le64toh(x) OSSwapLittleToHostInt64(x)
+#elif defined(sun) || defined(__sun)
+# include <sys/byteorder.h>
+# define _le64toh(x) LE_64(x)
+
#else
/* See: http://sourceforge.net/p/predef/wiki/Endianness/ */
@@ -81,11 +85,16 @@
HALF_ROUND(v0,v1,v2,v3,13,16); \
HALF_ROUND(v2,v1,v0,v3,17,21);
+#if 0
+/* This does not seem to save very much runtime in the fast case, and it's
+ * potentially a big loss in the slow case where we're misaligned and we cross
+ * a cache line. */
#if (defined(__i386) || defined(__i386__) || defined(_M_IX86) || \
defined(__x86_64) || defined(__x86_64__) || \
defined(_M_AMD64) || defined(_M_X64) || defined(__INTEL__))
# define UNALIGNED_OK 1
#endif
+#endif
uint64_t siphash24(const void *src, unsigned long src_sz, const struct sipkey *key) {
uint64_t k0 = key->k0;