Commit e00d7d02 authored by dilawar

chore: minor tweaks.

parent 6f4f6902
Related merge request: !6 Hotfix: fix to assert error post installation
Pipeline #3781 failed in 1 minute and 41 seconds
@@ -13,7 +13,7 @@ from enum import Enum
 from rich.progress import track
 import bitia.helper as bhelper
-from bitia.logger import logger, cprint, set_logger_level
+from bitia.logger import logger, cprint, set_logger_level, console
 from bitia import version as bversion
 import bitia.pipeline as bpipeline
@@ -81,10 +81,13 @@ def create_remote_container(

 @app.command("list-container")
 @session
-def list_remote_container(
-    user_input, server: str = bconfig.get_server()
-) -> T.List[str]:
+def list_containers(user_input):
     """List the remote server associated with the pipeline."""
+    for container in _list_remote_container(user_input):
+        cprint(container)
+
+
+def _list_remote_container(user_input) -> T.List[str]:
     pipeline = bpipeline.user_input_to_pipeline(user_input)
     logger.debug(f"sha256 of `{pipeline.zipfile}` is `{pipeline.checksum}`")
     logger.info(f"Finding container for {user_input}, sha256sum={pipeline.checksum}")
@@ -94,9 +97,7 @@ def list_remote_container(
         params=dict(pipeline_sha256=pipeline.checksum),
     )
     res.raise_for_status()
-    containers = res.json()["containers"].split(",")
-    cprint(containers)
-    return containers
+    return res.json()["containers"].split(",")


 @app.command("logs")
@@ -113,7 +114,7 @@ def stream_log(user_input, server: str = bconfig.get_server()):
     )
     res.raise_for_status()
     for line in res.iter_lines():
-        print(line.decode().rstrip())
+        cprint(line.decode().rstrip())


 @app.command("submit")
@@ -126,7 +127,7 @@ def submit_pipeline(user_input, *, rerun: bool = False, output_lines: T.List[str
     """
     res = create_remote_container(user_input, recreate=rerun, output_lines=output_lines)
     logger.info("Remote container: %s", res)
-    containers = list_remote_container(user_input)
+    containers = _list_remote_container(user_input)
     cprint(f"{containers}")
     return containers
@@ -136,11 +137,10 @@ def submit_pipeline(user_input, *, rerun: bool = False, output_lines: T.List[str
 def run_user_input(user_input, *, rerun: bool = False, output_lines: T.List[str] = []):
    """Run a pipeline"""
     create_remote_container(user_input, recreate=rerun, output_lines=output_lines)
-    containers = list_remote_container(user_input)
-    return [
-        bhelper.log_container(container, server=bconfig.get_server())
-        for container in containers
-    ]
+    containers = _list_remote_container(user_input)
+    for container in containers:
+        for _bl in bhelper.log_container(container, server=bconfig.get_server()):
+            print(_bl.decode().rstrip())


 @app.command("checksum")
...
@@ -23,12 +23,13 @@ from bitia.checksumdir import dirhash
 from bitia.logger import logger


-def log_container(container: str, server: str):
+def log_container(container: str, server: str, *, timestamps: bool = False):
     assert (
         container
     ), "Failed to determine the container that is runnning the pipeline. There is probably a bug in server end."
-    for line in bsession.fetch_logs(container, server=server):
-        print(line.decode().rstrip())
+    for line in bsession.fetch_logs(container, server=server,
+                                    timestamps=timestamps):
+        yield line


 def _check_server_status(server: str) -> int:
...
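
In this helper module, `log_container` becomes a generator with a keyword-only `timestamps` flag that it forwards to the session layer; callers such as `run_user_input` decide how to render the raw bytes. A rough sketch of that shape, with `fetch_logs` stubbed out (the real one issues a streaming HTTP request) and a placeholder server URL:

import typing as T


def fake_fetch_logs(container: str, *, server: str, timestamps: bool = True) -> T.Iterator[bytes]:
    # Stand-in for bitia.session.fetch_logs.
    prefix = b"2024-01-01T00:00:00Z " if timestamps else b""
    yield prefix + f"{container} started on {server}".encode()


def log_container(container: str, server: str, *, timestamps: bool = False) -> T.Iterator[bytes]:
    assert container, "Failed to determine the container running the pipeline."
    for line in fake_fetch_logs(container, server=server, timestamps=timestamps):
        yield line


for line in log_container("deadbeef1234", "https://server.example", timestamps=True):
    print(line.decode().rstrip())
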
@@ -9,12 +9,12 @@ g_session = requests.Session()
 SESSION_PICKLE_FILE = bconfig.bitia_dir() / ".session.pickle"


-def fetch_logs(container: str, *, server):
+def fetch_logs(container: str, *, server, timestamps: bool = True):
     """Fetch logs from a container."""
     logger.info(f"Fetching logs for container `{container}`")
     return get(
         f"{server}/container/logs",
-        params=dict(container=container),
+        params=dict(container=container, timestamps=timestamps),
         stream=True,
     )
...
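
At the session layer, the new `timestamps` argument is simply passed along as a query parameter, and the response is requested with `stream=True` so callers can iterate over `iter_lines()` without buffering the whole log. The sketch below uses `requests.get` directly against a placeholder server; the real module goes through its own `get` wrapper and `g_session`:

import requests


def fetch_logs(container: str, *, server: str, timestamps: bool = True) -> requests.Response:
    # Streaming GET: the body is read lazily, line by line, by the caller.
    return requests.get(
        f"{server}/container/logs",
        params=dict(container=container, timestamps=timestamps),
        stream=True,
        timeout=(5, None),  # connect timeout only; log streams can run long
    )


if __name__ == "__main__":
    res = fetch_logs("deadbeef1234", server="https://server.example")
    res.raise_for_status()
    for line in res.iter_lines():  # yields one bytes object per line
        print(line.decode().rstrip())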