Put the scrypt warning on a separate line to avoid 0 being shown as bufsize on Windows.
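
For context, the stray 0 most likely came from a varargs/format mismatch: cgpu->max_alloc is a 64-bit cl_ulong, so on a 32-bit Windows build it fills two 4-byte %u slots and the trailing %u reads its high word (usually 0) instead of bufsize. A minimal standalone sketch of one portable way to print such values (illustrative, not cgminer code; the values and casts are assumptions):

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint64_t max_alloc = UINT64_C(536870912); /* stand-in for cl_ulong cgpu->max_alloc */
        size_t bufsize = (size_t)1 << 30;         /* stand-in for the scrypt buffer size */

        /* Cast each argument to the type named by its conversion specifier
         * so the vararg slots line up on 32- and 64-bit builds alike; %lu
         * is also understood by the old msvcrt printf, unlike %zu or %llu. */
        printf("Maximum buffer memory supports says %lu\n", (unsigned long)max_alloc);
        printf("Your scrypt settings come to %lu\n", (unsigned long)bufsize);
        return 0;
    }

Even after the split below, each %u still only prints the low word of a 64-bit value, but no later argument gets mis-read, so the figures come out right for anything under 4 GB.
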
diff --git a/miner.h b/miner.h
index 169c9ee..b2715d4 100644
--- a/miner.h
+++ b/miner.h
@@ -378,7 +378,7 @@ struct cgpu_info {
#ifdef USE_SCRYPT
int opt_lg, lookup_gap;
- int opt_tc, thread_concurrency;
+ size_t opt_tc, thread_concurrency;
int shaders;
#endif
struct timeval tv_gpustart;
diff --git a/ocl.c b/ocl.c
index 450a2d6..723ca30 100644
--- a/ocl.c
+++ b/ocl.c
@@ -810,8 +810,8 @@ built:
/* Use the max alloc value which has been rounded to a power of
* 2 greater >= required amount earlier */
if (bufsize > cgpu->max_alloc) {
- applog(LOG_WARNING, "Maximum buffer memory device %d supports says %u, your scrypt settings come to %u",
- gpu, cgpu->max_alloc, bufsize);
+ applog(LOG_WARNING, "Maximum buffer memory device %d supports says %u", gpu, cgpu->max_alloc);
+ applog(LOG_WARNING, "Your scrypt settings come to %u", bufsize);
} else
bufsize = cgpu->max_alloc;
applog(LOG_DEBUG, "Creating scrypt buffer sized %d", bufsize);
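
The comment in the ocl.c hunk refers to an earlier step that rounds the allocation up to a power of two >= the required amount. A minimal sketch of that kind of round-up, assuming nothing about cgminer's actual rounding code (round_up_pow2 is a hypothetical helper):

    #include <stdio.h>

    /* Round n up to the nearest power of two >= n (for n >= 1). */
    static size_t round_up_pow2(size_t n)
    {
        size_t p = 1;

        while (p < n)
            p <<= 1;
        return p;
    }

    int main(void)
    {
        /* 100000000 bytes rounds up to 134217728 (2^27). */
        printf("%lu\n", (unsigned long)round_up_pow2(100000000));
        return 0;
    }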