Commit 10881c8: reduce long log lines
jreadey committed Dec 11, 2023 (1 parent: 219e84d)

Showing 6 changed files with 64 additions and 19 deletions.

hsds/chunk_crawl.py (21 additions, 6 deletions)

@@ -201,7 +201,18 @@ async def read_chunk_hyperslab(
         log.warn(f"expected to find {chunk_id} in chunk_map")
         return
     chunk_info = chunk_map[chunk_id]
-    log.debug(f"using chunk_map entry for {chunk_id}: {chunk_info}")
+    log.debug(f"using chunk_map entry for {chunk_id}")
+    if "points" in chunk_info:
+        points = chunk_info["points"]
+        log.debug(f"chunkinfo {len(points)} points")
+    elif "chunk_sel" in chunk_info:
+        chunk_sel = chunk_info["chunk_sel"]
+        log.debug(f"chunkinfo - chunk_sel: {chunk_sel}")
+    elif "data_sel" in chunk_info:
+        data_sel = chunk_info["data_sel"]
+        log.debug(f"chunkinfo - data_sel: {data_sel}")
+    else:
+        log.warn(f"unexpected chunkinfo: {chunk_info}")

     partition_chunk_id = getChunkIdForPartition(chunk_id, dset_json)
     if partition_chunk_id != chunk_id:
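
The replacement branch logs a compact summary of the chunk_map entry rather than dumping the whole dict. The same dispatch could be factored into a small helper; a minimal sketch (summarize_chunk_info is an invented name, not part of this commit):

def summarize_chunk_info(chunk_info: dict) -> str:
    # return a short, log-friendly description of a chunk_map entry
    if "points" in chunk_info:
        return f"{len(chunk_info['points'])} points"
    if "chunk_sel" in chunk_info:
        return f"chunk_sel: {chunk_info['chunk_sel']}"
    if "data_sel" in chunk_info:
        return f"data_sel: {chunk_info['data_sel']}"
    return f"unexpected keys: {list(chunk_info)}"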

@@ -624,7 +635,10 @@ def __init__(
         max_tasks_per_node = config.get("max_tasks_per_node_per_request", default=16)
         client_pool_count = config.get("client_pool_count", default=10)
         log.info(f"ChunkCrawler.__init__ {len(chunk_ids)} chunks, action={action}")
-        log.debug(f"ChunkCrawler - chunk_ids: {chunk_ids}")
+        if len(chunk_ids) < 10:
+            log.debug(f"ChunkCrawler - chunk_ids: {chunk_ids}")
+        else:
+            log.debug(f"ChunkCrawler - chunk_ids: {chunk_ids[:10]} ...")

         self._app = app
         self._slices = slices
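
The same truncate-before-logging guard reappears in hsds/chunk_sn.py and hsds/dset_lib.py below, so a shared helper would be a natural follow-up; a minimal sketch, assuming nothing about HSDS internals (fmt_id_list is an invented name):

def fmt_id_list(ids, limit=10):
    # format an id list for logging, truncating long lists the way this commit does
    if len(ids) < limit:
        return f"{ids}"
    return f"{ids[:limit]} ..."

# usage: log.debug(f"ChunkCrawler - chunk_ids: {fmt_id_list(chunk_ids)}")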

@@ -652,6 +666,7 @@ def __init__(
             self._max_tasks = max_tasks
         else:
             self._max_tasks = len(chunk_ids)
+        log.debug(f"ChunkCrawler max_tasks: {max_tasks}")

         if self._max_tasks >= client_pool_count:
             self._client_pool = 1

@@ -705,7 +720,8 @@ async def work(self):
         this_task = asyncio.current_task()
         task_name = this_task.get_name()
         log.info(f"ChunkCrawler - work method for task: {task_name}")
-        client_name = f"{task_name}.{random.randrange(0,self._client_pool)}"
+        task_suffix = random.randrange(0, self._client_pool)
+        client_name = f"{task_name}.{task_suffix}"
        log.info(f"ChunkCrawler - client_name: {client_name}")
         while True:
             try:

@@ -769,9 +785,8 @@ async def do_work(self, chunk_id, client=None):
                     bucket=self._bucket,
                     client=client,
                 )
-                log.debug(
-                    f"read_chunk_hyperslab - got 200 status for chunk_id: {chunk_id}"
-                )
+                msg = f"read_chunk_hyperslab - got 200 status for chunk_id: {chunk_id}"
+                log.debug(msg)
                 status_code = 200
             elif self._action == "write_chunk_hyperslab":
                 await write_chunk_hyperslab(
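
In the work() hunk above, each crawler task derives a client name from its asyncio task name plus a random pool slot. A self-contained sketch of that naming scheme (pool size and task names are illustrative; ChunkCrawler's real setup reads client_pool_count from config):

import asyncio
import random

CLIENT_POOL = 4  # stand-in for self._client_pool

async def work() -> None:
    task_name = asyncio.current_task().get_name()
    task_suffix = random.randrange(0, CLIENT_POOL)
    client_name = f"{task_name}.{task_suffix}"
    print(f"client_name: {client_name}")

async def main() -> None:
    tasks = [asyncio.create_task(work(), name=f"task_{i}") for i in range(3)]
    await asyncio.gather(*tasks)

asyncio.run(main())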

hsds/chunk_sn.py (4 additions, 1 deletion)

@@ -599,7 +599,10 @@ async def _doHyperslabWrite(app,
     except ValueError:
         log.warn("getChunkIds failed")
         raise HTTPInternalServerError()
-    log.debug(f"chunk_ids: {chunk_ids}")
+    if len(chunk_ids) < 10:
+        log.debug(f"chunk_ids: {chunk_ids}")
+    else:
+        log.debug(f"chunk_ids: {chunk_ids[:10]} ...")
     if len(chunk_ids) > max_chunks:
         msg = f"got {len(chunk_ids)} for page: {page_number}. max_chunks: {max_chunks}"
         log.warn(msg)

hsds/dset_lib.py (8 additions, 6 deletions)

@@ -317,7 +317,7 @@ def get_chunkmap_selections(chunk_map, chunk_ids, slices, dset_json):
"""Update chunk_map with chunk and data selections for the
given set of slices
"""
log.debug(f"get_chunkmap_selections - chunk_ids: {chunk_ids}")
log.debug(f"get_chunkmap_selections - {len(chunk_ids)} chunk_ids")
if not slices:
log.debug("no slices set, returning")
return # nothing to do

@@ -344,7 +344,7 @@ def get_chunk_selections(chunk_map, chunk_ids, slices, dset_json):
     """Update chunk_map with chunk and data selections for the
     given set of slices
     """
-    log.debug(f"get_chunk_selections - chunk_ids: {chunk_ids}")
+    log.debug(f"get_chunk_selections - {len(chunk_ids)} chunk_ids")
     if not slices:
         log.debug("no slices set, returning")
         return  # nothing to do

@@ -358,9 +358,8 @@ def get_chunk_selections(chunk_map, chunk_ids, slices, dset_json):
             chunk_map[chunk_id] = item

         chunk_sel = getChunkCoverage(chunk_id, slices, layout)
-        log.debug(
-            f"get_chunk_selections - chunk_id: {chunk_id}, chunk_sel: {chunk_sel}"
-        )
+        msg = f"get_chunk_selections - chunk_id: {chunk_id}, chunk_sel: {chunk_sel}"
+        log.debug(msg)
         item["chunk_sel"] = chunk_sel
         data_sel = getDataCoverage(chunk_id, slices, layout)
         log.debug(f"get_chunk_selections - data_sel: {data_sel}")

@@ -493,7 +492,10 @@ async def doReadSelection(
 ):
     """read selection utility function"""
     log.info(f"doReadSelection - number of chunk_ids: {len(chunk_ids)}")
-    log.debug(f"doReadSelection - chunk_ids: {chunk_ids}")
+    if len(chunk_ids) < 10:
+        log.debug(f"chunk_ids: {chunk_ids}")
+    else:
+        log.debug(f"chunk_ids: {chunk_ids[:10]} ...")
     log.debug(f"doReadSelection - select_dtype: {select_dtype}")

     type_json = dset_json["type"]
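
The get_chunk_selections hunks above compute, per chunk, which part of the request a chunk covers (chunk_sel) and where those elements land in the result array (data_sel). A self-contained 1-D illustration of the idea (stride-1 slices only; coverage_1d is invented, and the exact coordinate conventions of getChunkCoverage/getDataCoverage aren't shown in this diff):

def coverage_1d(sel: slice, chunk_index: int, chunk_extent: int):
    # intersect a request slice with one chunk of the dataset
    chunk_start = chunk_index * chunk_extent
    chunk_end = chunk_start + chunk_extent
    start = max(sel.start, chunk_start)
    stop = min(sel.stop, chunk_end)
    chunk_sel = slice(start, stop)  # covered range, dataset coordinates
    data_sel = slice(start - sel.start, stop - sel.start)  # output coordinates
    return chunk_sel, data_sel

# chunks of 10 elements; request [5:25) touches chunks 0, 1, and 2
print(coverage_1d(slice(5, 25), 1, 10))  # (slice(10, 20), slice(5, 15))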

hsds/hsds_logger.py (22 additions, 1 deletion)

@@ -158,6 +158,8 @@ def request(req):
     if max_task_count and active_tasks > max_task_count:
         warning(f"more than {max_task_count} tasks, returning 503")
         raise HTTPServiceUnavailable()
+    else:
+        debug(f"active_tasks: {active_tasks} max_tasks: {max_task_count}")


 def response(req, resp=None, code=None, message=None):

@@ -178,9 +180,28 @@ def response(req, resp=None, code=None, message=None):

     log_level = config["log_level"]

-    if log_level <= level:
+    if log_level == DEBUG:
         prefix = config["prefix"]
         ts = _timestamp()
+
+        num_tasks = len(asyncio.all_tasks())
+        active_tasks = _activeTaskCount()
+
+        debug(f"rsp - num tasks: {num_tasks} active tasks: {active_tasks}")
+
+        s = "{}{} RSP> <{}> ({}): {}"
+        print(s.format(prefix, ts, code, message, req.path))
+
+    elif log_level <= level:
+        prefix = config["prefix"]
+        ts = _timestamp()
+
+        num_tasks = len(asyncio.all_tasks())
+        active_tasks = _activeTaskCount()
+
+        debug(f"num tasks: {num_tasks} active tasks: {active_tasks}")
+
         s = "{}{} RSP> <{}> ({}): {}"
         print(s.format(prefix, ts, code, message, req.path))
     else:
         pass
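
With this change, DEBUG-level deployments log the asyncio task counts on every response. A runnable sketch of just the task-count part (stdlib only; _activeTaskCount is HSDS-internal and omitted here):

import asyncio

async def handler(name: str) -> None:
    # stand-in for an HSDS request handler
    await asyncio.sleep(0.1)
    num_tasks = len(asyncio.all_tasks())  # all live tasks on the running loop
    print(f"{name}: num tasks: {num_tasks}")

async def main() -> None:
    await asyncio.gather(*(handler(f"req-{i}") for i in range(3)))

asyncio.run(main())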

hsds/util/dsetUtil.py (2 additions, 1 deletion)

@@ -910,12 +910,13 @@ def getChunkLayout(dset_json):
 def getChunkInitializer(dset_json):
     """ get initializer application and arguments if set """
     initializer = None
-    log.debug(f"getChunkInitializer({dset_json})")
     if "creationProperties" in dset_json:
         cprops = dset_json["creationProperties"]
         log.debug(f"get creationProperties: {cprops}")
         if "initializer" in cprops:
             initializer = cprops["initializer"]
+            dset_id = dset_json["id"]
+            log.debug(f"returning chunk initializer: {initializer} for dset: {dset_id}")
     return initializer
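
getChunkInitializer now logs the initializer only when one is actually configured, instead of dumping the full dset_json on every call. A hypothetical dset_json showing the shape the function inspects (id and initializer values are invented; the diff doesn't show the initializer's exact format):

dset_json = {
    "id": "d-00000000-0000-0000-0000-000000000000",  # invented dataset id
    "creationProperties": {
        "initializer": "chunk_init_app --fill=0",  # invented value and format
    },
}
# getChunkInitializer(dset_json) logs and returns the initializer above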


hsds/util/storUtil.py (7 additions, 4 deletions)

@@ -220,10 +220,13 @@ def _uncompress(data, compressor=None, shuffle=0, level=None, dtype=None, chunk_
         raise HTTPInternalServerError()
     if shuffle:
         data = _unshuffle(shuffle, data, dtype=dtype, chunk_shape=chunk_shape)
-    finish_time = time.time()
-    elapsed = finish_time - start_time
-    msg = f"uncompressed {len(data)} bytes, {(elapsed):.3f}s elapsed"
-    log.debug(msg)

+    if compressor or shuffle:
+        # log the decompression time
+        finish_time = time.time()
+        elapsed = finish_time - start_time
+        msg = f"uncompressed {len(data)} bytes, {(elapsed):.3f}s elapsed"
+        log.debug(msg)

     return data
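
The guard means _uncompress only emits the timing line when a decompressor or shuffle filter actually ran; pass-through calls no longer log a misleading "uncompressed ... elapsed" entry. The general pattern as a standalone sketch (names invented; the real _uncompress signature differs):

import logging
import time

log = logging.getLogger(__name__)

def maybe_transform(data: bytes, transform=None) -> bytes:
    # apply an optional transform; log timing only when work was done
    start_time = time.time()
    if transform:
        data = transform(data)
        elapsed = time.time() - start_time
        log.debug(f"transformed {len(data)} bytes, {elapsed:.3f}s elapsed")
    return data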
