
How do I add two filters at once #1139

Open
hanzhenbjm opened this issue May 10, 2024 · 11 comments
Labels: question Further information is requested

Comments

@hanzhenbjm

Hello, I have a question: how do I add two filters at once?

def serialize(record):
    subset = {
        "log_type": "desc",
        "module": str(record["module"]),
        "logger": str(record["module"]),
        "process": record["process"].id,
        "processName": record["process"].name,
        "thread": record["thread"].id,
        "threadName": record["thread"].name,
        "transaction_id": client_addr_var.get(),
    }
    return json.dumps(subset, ensure_ascii=False)


def inner_flow(record):
    subset = {
        "address": record["extra"].get("address"),
        "request_headers": record["extra"].get("request_headers"),
        "request_time": record["extra"].get("request_time"),
        "response_code": record["extra"].get("response_code"),
        "response_headers": record["extra"].get("response_headers"),
        "response_payload": record["extra"].get("response_payload"),
        "response_remark": record["extra"].get("response_remark"),
        "response_time": record["extra"].get("response_time"),
        "total_time": record["extra"].get("total_time"),
        "transaction_id": record["extra"].get("transaction_id"),
    }
    return json.dumps(subset, ensure_ascii=False)


def level_filter(level):
    def is_level(record):
        # print(record["level"].name)
        return record["level"].name == level

    return is_level


def _logger_filter(record):
    record['transaction_id'] = f"{client_addr_var.get()}"
    return record['transaction_id']


def patching(record):
    record["extra"]["serialized"] = serialize(record)
    record["extra"]["inner"] = inner_flow(record)


logger.remove()
logger = logger.patch(patching)
logger.add(debug_path, format="{extra[serialized]}", filter=level_filter(level="DEBUG"), rotation="12:00",
           retention="15 days")
logger.add(error_path, format="{extra[serialized]}", filter=level_filter(level="ERROR"), rotation="12:00",
           retention="15 days")
logger.add(trace_path, format="{extra[serialized]}", filter=level_filter(level="TRACE"), level="TRACE",
           rotation="12:00", retention="15 days")
logger.add(warn_path, format="{extra[serialized]}", filter=level_filter(level="WARNING"), rotation="12:00",
           retention="15 days")
logger.add(info_path, format="{extra[inner]}", filter=level_filter(level="INFO"),
           rotation="12:00", retention="15 days")

I want to apply both level_filter and _logger_filter at the same time; _logger_filter is used to attach the transaction_id (request id) to each record.

@Delgan
Owner

Delgan commented May 10, 2024

Can you merge these filters into a single function?

@hanzhenbjm
Author

def debug_filter(record):
    return level_filter(level="DEBUG")(record) and _logger_filter(record)


def error_filter(record):
    return level_filter(level="ERROR")(record) and _logger_filter(record)


def trace_filter(record):
    return level_filter(level="TRACE")(record) and _logger_filter(record)


def warn_filter(record):
    return level_filter(level="WARNING")(record) and _logger_filter(record)


def info_filter(record):
    return level_filter(level="INFO")(record) and _logger_filter(record)

def patching(record):
    record["extra"]["serialized"] = serialize(record)
    record["extra"]["inner"] = inner_flow(record)


logger.remove()
logger = logger.patch(patching)
logger.add(debug_path, format="{extra[serialized]}", filter=debug_filter, rotation="12:00",
           retention="15 days")
logger.add(error_path, format="{extra[serialized]}", filter=error_filter, rotation="12:00",
           retention="15 days")
logger.add(trace_path, format="{extra[serialized]}", filter=trace_filter, level="TRACE",
           rotation="12:00", retention="15 days")
logger.add(warn_path, format="{extra[serialized]}", filter=warn_filter, rotation="12:00",
           retention="15 days")
logger.add(info_path, format="{extra[inner]}", filter=info_filter,
           rotation="12:00", retention="15 days")

like this?

@Delgan
Owner

Delgan commented May 10, 2024

More like this:

def _level_filter(record, level):
    return record["level"].name == level

def _logger_filter(record):
    record['transaction_id'] = f"{client_addr_var.get()}"
    return record['transaction_id']

def make_filter(level):
    def my_filter(record):
        return _level_filter(record, level) and _logger_filter(record)
    return my_filter

logger.remove()
logger = logger.patch(patching)
logger.add(debug_path, format="{extra[serialized]}", filter=make_filter("DEBUG"), rotation="12:00",
           retention="15 days")
logger.add(error_path, format="{extra[serialized]}", filter=make_filter("ERROR"), rotation="12:00",
           retention="15 days")
logger.add(trace_path, format="{extra[serialized]}", filter=make_filter("TRACE"), level="TRACE",
           rotation="12:00", retention="15 days")
logger.add(warn_path, format="{extra[serialized]}", filter=make_filter("WARNING"), rotation="12:00",
           retention="15 days")
logger.add(info_path, format="{extra[inner]}", filter=make_filter("INFO"),
           rotation="12:00", retention="15 days")

@hanzhenbjm
Author

hanzhenbjm commented May 10, 2024

Thank you.
For the request id, should I use with logger.contextualize(request_id=transaction_id)?

@Delgan
Owner

Delgan commented May 10, 2024

For the request id, should I use with logger.contextualize(request_id=transaction_id)?

Sorry, I'm not sure I understand your question. Can you elaborate, please?

@hanzhenbjm
Author

Within a single request, the same uuid should be attached to every log entry, but different requests should have different uuids.

@Delgan
Owner

Delgan commented May 10, 2024

I can hardly give you an answer without seeing your code. You can try using logger.contextualize(); it usually helps with this use case.
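For example, something like this (a minimal sketch; the handle_request function and the requests.log sink path are placeholders, not taken from your code): every log emitted inside the with block carries the same request_id, and each new request gets a fresh uuid.

import uuid

from loguru import logger

# Hypothetical sink: the request_id bound via contextualize() is available in extra.
# It assumes request_id is always bound before logging to this sink.
logger.add("requests.log", format="{extra[request_id]} | {level} | {message}")


def handle_request(payload):
    # A new uuid per request; all logs inside the block share it.
    transaction_id = uuid.uuid4().hex
    with logger.contextualize(request_id=transaction_id):
        logger.info("request received")
        logger.info("request processed: {}", payload)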

@hanzhenbjm
Author

Thank you.

@Delgan Delgan added the question Further information is requested label May 10, 2024
@hanzhenbjm hanzhenbjm reopened this May 11, 2024
@hanzhenbjm
Author

hanzhenbjm commented May 11, 2024

import os
import json
import time
import uuid
import socket
import asyncio
import datetime
from loguru import logger
from hypercorn.asyncio import serve
from hypercorn.config import Config
from fastapi import FastAPI, Request
from starlette.responses import Response
from starlette.middleware.base import BaseHTTPMiddleware

app = FastAPI()

test_log = "log"
LOG_BASE_PATH = r'D:/LOG/loguru/%s' % test_log
log_path = "%s/%s_p%s_" % (LOG_BASE_PATH, test_log, os.getpid())
debug_path = "%s_debug.log" % log_path
error_path = "%s_error.log" % log_path
trace_path = "%s_trace.log" % log_path
warn_path = "%s_warn.log" % log_path
info_path = "%s_info.log" % log_path


def serialize(record):
    subset = {
        "timestamp": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
        "level": record["level"].name if record["level"].name != "WARNING" else "WARN",
        "message": record["message"],
        "log_time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
        "HOSTNAME": os.getenv("HOSTNAME", socket.gethostname()),
        "lineno": str(record["line"]),
        "process": record["process"].id,
        "processName": record["process"].name,
        "thread": record["thread"].id,
        "threadName": record["thread"].name,
        "transaction_id": record["extra"].get("request_id"),
    }
    return json.dumps(subset, ensure_ascii=False)


def inner_flow(record):
    subset = {
        "request_time": record["extra"].get("request_time"),
        "response_time": record["extra"].get("response_time"),
        "address": record["extra"].get("address"),
        "http_method": record["extra"].get("http_method"),
        "request_payload": record["extra"].get("request_payload"),
        "response_payload": record["extra"].get("response_payload"),
        "request_headers": record["extra"].get("request_headers"),
        "http_status_code": record["extra"].get("http_status_code"),
        "total_time": record["extra"].get("total_time"),
        "log_time": record["extra"].get("log_time"),
        "transaction_id": record["extra"].get("request_id"),

    }
    return json.dumps(subset, ensure_ascii=False)


def _level_filter(record, level):
    return record["level"].name == level


def _logger_filter(record):
    # record['transaction_id'] = f"{client_addr_var.get()}"
    # return record['transaction_id']
    return record


def make_filter(level):
    def my_filter(record):
        return _level_filter(record, level) and _logger_filter(record)

    return my_filter


def patching(record):
    record["extra"]["serialized"] = serialize(record)
    record["extra"]["inner"] = inner_flow(record)


logger.remove()
logger = logger.patch(patching)

logger.add(debug_path, format="{extra[serialized]}", filter=make_filter("DEBUG"), rotation="12:00",
           retention="15 days")
logger.add(error_path, format="{extra[serialized]}", filter=make_filter("ERROR"), rotation="12:00",
           retention="15 days")
logger.add(trace_path, format="{extra[serialized]}", filter=make_filter("TRACE"), level="TRACE",
           rotation="12:00", retention="15 days")
logger.add(warn_path, format="{extra[serialized]}", filter=make_filter("WARNING"), rotation="12:00",
           retention="15 days")
logger.add(info_path, format="{extra[inner]}", filter=make_filter("INFO"),
           rotation="12:00", retention="15 days")


class MiddlewareLog(BaseHTTPMiddleware):

    def __init__(self, app):
        super().__init__(app)

    async def dispatch(self, request: Request, call_next) -> Response:
        transaction_id = str(uuid.uuid4()).replace('-', '')
        with logger.contextualize(request_id=transaction_id):
            header = request.headers.get("user-agent")
            request_url = str(request.url)
            request_time = datetime.datetime.now()
            http_method = request.method
            if http_method == "GET":
                request_content = str(request.query_params)
            elif http_method == "POST":
                body_ = await request.json()
                request_content = str(body_)
            else:
                # Avoid a NameError for methods other than GET/POST.
                request_content = ""
            logger.warning("params=%s" % request_content)
            response = await call_next(request)
            method_code = request.scope.get("route").name if request.scope.get("route") is not None else ""
            if request_url.rfind("docs") != -1:
                return response
            response_time = datetime.datetime.now()
            body = b""
            async for chunk in response.body_iterator:
                body += chunk
            result = body.decode("utf-8")
            logger.bind(
                request_time=request_time.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
                response_time=response_time.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
                address=request_url,
                http_method=http_method,
                request_payload=request_content,
                response_payload=str(result),
                request_headers=header,
                http_status_code=response.status_code,
                total_time=int(time.mktime(response_time.timetuple()) * 1000 - time.mktime(
                    request_time.timetuple()) * 1000),
                log_time=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3], ).info("finish")

            return Response(
                content=body,
                status_code=response.status_code,
                headers=dict(response.headers),
                media_type=response.media_type
            )


app.add_middleware(MiddlewareLog)
config = Config()
config.bind = ["127.0.0.1:9999"]


@app.get("/")
async def hello_world(request: Request):
    logger.warning("hello world")
    return Response("hello world")


async def run_server():
    await serve(app, config)


if __name__ == '__main__':
    asyncio.run(run_server())

How can this Loguru code be optimized to make it faster? Thank you.

@Delgan
Owner

Delgan commented May 11, 2024

Ideally, you should group your logs into a single file instead of splitting them into five files. This should improve performance.

If you want to view logs separately by level, you should perform post-processing of your log file.
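As an illustration, a minimal sketch of that approach, assuming the patching/serialize setup from the code above (all_path and the per-level output file names are placeholders, not part of the original code): one sink writes every record as a JSON line, and a small post-processing step splits the file by level afterwards.

import json

# Single sink instead of five; all_path is a hypothetical path built from log_path above.
all_path = "%s_all.log" % log_path
logger.add(all_path, format="{extra[serialized]}", level="TRACE",
           rotation="12:00", retention="15 days")


def split_by_level(log_file):
    # Post-process the JSON-lines file into one file per level.
    handles = {}
    with open(log_file, encoding="utf-8") as src:
        for line in src:
            level = json.loads(line).get("level", "UNKNOWN")
            if level not in handles:
                handles[level] = open("%s.%s" % (log_file, level.lower()), "w", encoding="utf-8")
            handles[level].write(line)
    for handle in handles.values():
        handle.close()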

@hanzhenbjm
Author

hanzhenbjm commented May 11, 2024

Under normal circumstances the logs are kept in separate files just so they are convenient to read. May I ask whether this custom logging code is written correctly? Thank you very much.
