Доработал закрытие, техоперации автоматом переключаются, добавил выгрузку сменных заданий
All checks were successful
Deploy MES Core / deploy (push) Successful in 14s
All checks were successful
Deploy MES Core / deploy (push) Successful in 14s
This commit is contained in:
@@ -1,12 +1,22 @@
|
||||
import logging
|
||||
from django.db import transaction
|
||||
from django.db.models import Q, Case, When, Value, IntegerField
|
||||
from django.db.models import Q, Case, When, Value, IntegerField, Sum
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone
|
||||
from warehouse.models import StockItem
|
||||
from shiftflow.models import WorkItem, CuttingSession, ProductionReportConsumption, ProductionReportStockResult
|
||||
from shiftflow.models import (
|
||||
DealEntityProgress,
|
||||
DealItem,
|
||||
ProductionTask,
|
||||
WorkItem,
|
||||
CuttingSession,
|
||||
ProductionReportConsumption,
|
||||
ProductionReportStockResult,
|
||||
)
|
||||
from shiftflow.services.bom_explosion import _build_bom_graph
|
||||
from shiftflow.services.kitting import get_work_location_for_workitem
|
||||
from manufacturing.models import EntityOperation
|
||||
from shiftflow.services.route_flow import advance_progress_and_generate_next_workitem
|
||||
from manufacturing.models import EntityOperation, Operation
|
||||
|
||||
|
||||
def get_first_operation_id(entity_id: int) -> int | None:
|
||||
@@ -189,6 +199,59 @@ def apply_assembly_closing(workitem_id: int, fact_qty: int, user_id: int) -> boo
|
||||
if workitem.quantity_done >= workitem.quantity_plan:
|
||||
workitem.status = 'done'
|
||||
workitem.save(update_fields=['quantity_done', 'quantity_reported', 'status'])
|
||||
advance_progress_and_generate_next_workitem(workitem_id=int(workitem.id))
|
||||
|
||||
target_qty = None
|
||||
if getattr(workitem, 'delivery_batch_id', None):
|
||||
target_qty = ProductionTask.objects.filter(
|
||||
deal_id=workitem.deal_id,
|
||||
delivery_batch_id=workitem.delivery_batch_id,
|
||||
entity_id=workitem.entity_id,
|
||||
).values_list('quantity_ordered', flat=True).first()
|
||||
else:
|
||||
di = DealItem.objects.filter(deal_id=workitem.deal_id, entity_id=workitem.entity_id).first()
|
||||
target_qty = int(di.quantity) if di else None
|
||||
|
||||
if target_qty is not None:
|
||||
op_code = ''
|
||||
if getattr(workitem, 'operation_id', None):
|
||||
op_code = (Operation.objects.filter(pk=workitem.operation_id).values_list('code', flat=True).first() or '').strip()
|
||||
if not op_code:
|
||||
op_code = (workitem.stage or '').strip()
|
||||
|
||||
if op_code:
|
||||
progress = (
|
||||
DealEntityProgress.objects.select_for_update(of=('self',))
|
||||
.filter(
|
||||
deal_id=workitem.deal_id,
|
||||
delivery_batch_id=(int(workitem.delivery_batch_id) if getattr(workitem, 'delivery_batch_id', None) else None),
|
||||
entity_id=workitem.entity_id,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if not progress:
|
||||
progress = DealEntityProgress.objects.create(
|
||||
deal_id=workitem.deal_id,
|
||||
delivery_batch_id=(int(workitem.delivery_batch_id) if getattr(workitem, 'delivery_batch_id', None) else None),
|
||||
entity_id=workitem.entity_id,
|
||||
current_seq=1,
|
||||
)
|
||||
|
||||
cur = int(progress.current_seq or 1)
|
||||
cur_eo = EntityOperation.objects.select_related('operation').filter(entity_id=workitem.entity_id, seq=cur).first()
|
||||
if cur_eo and cur_eo.operation and (cur_eo.operation.code or '').strip() == op_code:
|
||||
wi_qs = WorkItem.objects.filter(deal_id=workitem.deal_id, entity_id=workitem.entity_id).filter(
|
||||
Q(operation__code=op_code) | Q(stage=op_code)
|
||||
)
|
||||
if getattr(workitem, 'delivery_batch_id', None):
|
||||
wi_qs = wi_qs.filter(delivery_batch_id=workitem.delivery_batch_id)
|
||||
else:
|
||||
wi_qs = wi_qs.filter(delivery_batch_id__isnull=True)
|
||||
|
||||
total_done = wi_qs.aggregate(s=Coalesce(Sum('quantity_done'), 0))['s']
|
||||
if int(total_done or 0) >= int(target_qty):
|
||||
progress.current_seq = cur + 1
|
||||
progress.save(update_fields=['current_seq'])
|
||||
|
||||
logger.info(
|
||||
'assembly_closing:done workitem_id=%s qty=%s deal_id=%s location_id=%s user_id=%s report_id=%s',
|
||||
|
||||
@@ -11,6 +11,7 @@ from shiftflow.models import (
|
||||
ShiftItem,
|
||||
)
|
||||
from shiftflow.services.sessions import close_cutting_session
|
||||
from shiftflow.services.route_flow import advance_progress_and_generate_next_workitem
|
||||
|
||||
logger = logging.getLogger('mes')
|
||||
|
||||
@@ -208,6 +209,7 @@ def apply_closing_workitems(
|
||||
else:
|
||||
wi.status = 'planned'
|
||||
wi.save(update_fields=['quantity_done', 'quantity_reported', 'status'])
|
||||
advance_progress_and_generate_next_workitem(workitem_id=int(wi.id))
|
||||
|
||||
for stock_item_id, qty in consumptions.items():
|
||||
if qty and float(qty) > 0:
|
||||
|
||||
134
shiftflow/services/route_flow.py
Normal file
134
shiftflow/services/route_flow.py
Normal file
@@ -0,0 +1,134 @@
|
||||
import logging
|
||||
|
||||
from django.db import transaction
|
||||
from django.db.models import Q, Sum
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone
|
||||
|
||||
from manufacturing.models import EntityOperation
|
||||
from shiftflow.models import DealEntityProgress, DealItem, ProductionTask, WorkItem
|
||||
|
||||
logger = logging.getLogger('mes')
|
||||
|
||||
|
||||
def _workitem_op_code(wi: WorkItem) -> str:
    """Resolve the routing operation code for a work item.

    Prefers the code of the linked Operation (when both the FK id and the
    related object are present); falls back to the free-text ``stage`` field.
    Returns '' when neither source yields a non-blank code.
    """
    linked_op = getattr(wi, 'operation', None) if getattr(wi, 'operation_id', None) else None
    if linked_op:
        from_operation = (linked_op.code or '').strip()
        if from_operation:
            return from_operation
    return (wi.stage or '').strip()
|
||||
|
||||
|
||||
def _target_qty_for_workitem(wi: WorkItem) -> int | None:
    """Return the quantity a route step must reach before advancing, or None.

    Batch-scoped work items take the ordered quantity from ProductionTask;
    otherwise the deal line (DealItem) quantity is used. None means the
    target is unknown and the caller should not advance the route.
    """
    if getattr(wi, 'delivery_batch_id', None):
        task_qty = (
            ProductionTask.objects.filter(
                deal_id=wi.deal_id,
                delivery_batch_id=wi.delivery_batch_id,
                entity_id=wi.entity_id,
            )
            .values_list('quantity_ordered', flat=True)
            .first()
        )
        return None if task_qty is None else int(task_qty)

    deal_item = DealItem.objects.filter(deal_id=wi.deal_id, entity_id=wi.entity_id).first()
    return int(deal_item.quantity) if deal_item else None
|
||||
|
||||
|
||||
@transaction.atomic
def advance_progress_and_generate_next_workitem(*, workitem_id: int) -> int | None:
    """Advance the per-deal/entity route pointer and plan the next operation.

    Called after a work item's done quantity changes. If every work item of the
    *current* route operation has collectively reached the target quantity, the
    DealEntityProgress pointer moves to the next seq and a new 'planned'
    WorkItem is created for the remaining quantity of the next operation.

    Returns the id of the newly created WorkItem, or None when nothing was
    created (unknown item/code/target, wrong current operation, quota not yet
    met, end of route, or the next operation is already fully planned).

    Locking: both the WorkItem and the DealEntityProgress rows are taken with
    select_for_update so concurrent closings cannot double-advance the route.
    """
    # Lock the triggering work item; operation is needed for code resolution.
    wi = (
        WorkItem.objects.select_for_update(of=('self',))
        .select_related('operation')
        .filter(id=int(workitem_id))
        .first()
    )
    if not wi:
        return None

    op_code = _workitem_op_code(wi)
    if not op_code:
        return None

    target_qty = _target_qty_for_workitem(wi)
    if target_qty is None:
        return None

    # Progress rows are keyed by (deal, batch-or-None, entity); create lazily at seq 1.
    progress, _ = DealEntityProgress.objects.select_for_update(of=('self',)).get_or_create(
        deal_id=wi.deal_id,
        delivery_batch_id=(int(wi.delivery_batch_id) if getattr(wi, 'delivery_batch_id', None) else None),
        entity_id=wi.entity_id,
        defaults={'current_seq': 1},
    )

    cur = int(progress.current_seq or 1)
    cur_eo = EntityOperation.objects.select_related('operation').filter(entity_id=wi.entity_id, seq=cur).first()
    if not cur_eo or not cur_eo.operation:
        return None

    # Only the operation the route currently points at may advance it.
    cur_code = (cur_eo.operation.code or '').strip()
    if cur_code != op_code:
        return None

    # Sum done quantity across ALL work items of this operation (matched by
    # operation code or legacy stage text), scoped to the same batch (or no batch).
    wi_qs = WorkItem.objects.filter(deal_id=wi.deal_id, entity_id=wi.entity_id).filter(Q(operation__code=op_code) | Q(stage=op_code))
    if getattr(wi, 'delivery_batch_id', None):
        wi_qs = wi_qs.filter(delivery_batch_id=wi.delivery_batch_id)
    else:
        wi_qs = wi_qs.filter(delivery_batch_id__isnull=True)

    total_done = wi_qs.aggregate(s=Coalesce(Sum('quantity_done'), 0))['s']
    if int(total_done or 0) < int(target_qty):
        return None

    # Quota met: move the route pointer forward.
    progress.current_seq = cur + 1
    progress.save(update_fields=['current_seq'])

    next_eo = (
        EntityOperation.objects.select_related('operation', 'operation__workshop')
        .filter(entity_id=wi.entity_id, seq=int(progress.current_seq))
        .first()
    )
    if not next_eo or not next_eo.operation:
        # End of the route — pointer advanced, nothing left to plan.
        return None

    next_op = next_eo.operation
    next_code = (next_op.code or '').strip()

    # Avoid over-planning: subtract what is already planned for the next operation.
    planned_qs = WorkItem.objects.filter(deal_id=wi.deal_id, entity_id=wi.entity_id)
    if getattr(wi, 'delivery_batch_id', None):
        planned_qs = planned_qs.filter(delivery_batch_id=wi.delivery_batch_id)
    else:
        planned_qs = planned_qs.filter(delivery_batch_id__isnull=True)

    planned_total = planned_qs.filter(Q(operation_id=next_op.id) | Q(operation__code=next_code) | Q(stage=next_code)).aggregate(
        s=Coalesce(Sum('quantity_plan'), 0)
    )['s']

    remaining_to_plan = max(0, int(target_qty) - int(planned_total or 0))
    if remaining_to_plan <= 0:
        return None

    # Plan the next operation for today; machine assignment is left to dispatch.
    created = WorkItem.objects.create(
        deal_id=wi.deal_id,
        delivery_batch_id=(int(wi.delivery_batch_id) if getattr(wi, 'delivery_batch_id', None) else None),
        entity_id=wi.entity_id,
        operation_id=next_op.id,
        stage=(next_code or next_op.name or '')[:32],
        workshop_id=(int(next_op.workshop_id) if getattr(next_op, 'workshop_id', None) else None),
        machine_id=None,
        quantity_plan=int(remaining_to_plan),
        quantity_done=0,
        status='planned',
        date=timezone.localdate(),
    )
    logger.info(
        'route_flow:created_next_workitem id=%s deal_id=%s batch_id=%s entity_id=%s op=%s qty=%s',
        created.id,
        created.deal_id,
        getattr(created, 'delivery_batch_id', None),
        created.entity_id,
        next_code or '',
        created.quantity_plan,
    )
    return int(created.id)
|
||||
449
shiftflow/services/workitem_registry_export.py
Normal file
449
shiftflow/services/workitem_registry_export.py
Normal file
@@ -0,0 +1,449 @@
|
||||
"""
|
||||
Сервис выгрузки сменного задания из реестра WorkItem в ZIP.
|
||||
|
||||
Задача:
|
||||
- отдать оператору офлайн-пакет без сохранения на диск (всё в памяти);
|
||||
- внутри архива:
|
||||
- HTML (строго тот же шаблон, что и кнопка «Печать»),
|
||||
- TXT (текстовая версия),
|
||||
- manifest.txt,
|
||||
- файлы КД (DXF/IGES/STEP/PDF) разложенные по папкам материала.
|
||||
- выгружаем только сделки в статусе «В работе».
|
||||
|
||||
Особенности:
|
||||
- опциональная транслитерация имён файлов/папок для совместимости с “железом” и Windows-путями;
|
||||
- для типового случая ленточнопилы (нет чертежей/IGES) в HTML/TXT должна быть видна длина заготовки,
|
||||
если она хранится в ProductEntity.blank_length_mm.
|
||||
"""
|
||||
|
||||
from __future__ import annotations

import io
import logging
import re
import zipfile
from dataclasses import dataclass
from datetime import date, datetime

from django.template.loader import render_to_string
from django.utils import timezone

from shiftflow.models import WorkItem
|
||||
|
||||
logger = logging.getLogger("mes")
|
||||
|
||||
|
||||
_RU_TRANSLIT_MAP = {
|
||||
"а": "a",
|
||||
"б": "b",
|
||||
"в": "v",
|
||||
"г": "g",
|
||||
"д": "d",
|
||||
"е": "e",
|
||||
"ё": "yo",
|
||||
"ж": "zh",
|
||||
"з": "z",
|
||||
"и": "i",
|
||||
"й": "y",
|
||||
"к": "k",
|
||||
"л": "l",
|
||||
"м": "m",
|
||||
"н": "n",
|
||||
"о": "o",
|
||||
"п": "p",
|
||||
"р": "r",
|
||||
"с": "s",
|
||||
"т": "t",
|
||||
"у": "u",
|
||||
"ф": "f",
|
||||
"х": "h",
|
||||
"ц": "ts",
|
||||
"ч": "ch",
|
||||
"ш": "sh",
|
||||
"щ": "sch",
|
||||
"ъ": "",
|
||||
"ы": "y",
|
||||
"ь": "",
|
||||
"э": "e",
|
||||
"ю": "yu",
|
||||
"я": "ya",
|
||||
}
|
||||
|
||||
_INVALID_WIN_CHARS_RE = re.compile(r'[<>:"/\\|?*\x00-\x1F]')
|
||||
_MULTISPACE_RE = re.compile(r"\s+")
|
||||
_MULTI_DOTS_RE = re.compile(r"\.{2,}")
|
||||
|
||||
|
||||
def _transliterate_ru(s: str) -> str:
|
||||
"""
|
||||
Простейшая транслитерация RU->LAT для имён файлов.
|
||||
|
||||
Почему не slugify:
|
||||
- стандартный slugify без сторонних библиотек не транслитерирует кириллицу (вырезает),
|
||||
а нам важно сохранить читаемость.
|
||||
"""
|
||||
out: list[str] = []
|
||||
for ch in (s or ""):
|
||||
low = ch.lower()
|
||||
if low in _RU_TRANSLIT_MAP:
|
||||
t = _RU_TRANSLIT_MAP[low]
|
||||
out.append(t.upper() if ch.isupper() and t else t)
|
||||
else:
|
||||
out.append(ch)
|
||||
return "".join(out)
|
||||
|
||||
|
||||
def _sanitize_fs_component(s: str) -> str:
|
||||
"""
|
||||
Делает строку безопасной как имя файла/папки (под Windows).
|
||||
|
||||
Правила:
|
||||
- вычищаем недопустимые символы;
|
||||
- нормализуем пробелы;
|
||||
- не допускаем пустых имён;
|
||||
- убираем точки/пробелы на конце (Windows).
|
||||
"""
|
||||
s = (s or "").strip()
|
||||
s = _INVALID_WIN_CHARS_RE.sub("_", s)
|
||||
s = _MULTISPACE_RE.sub(" ", s).strip()
|
||||
s = _MULTI_DOTS_RE.sub(".", s)
|
||||
s = s.strip(" .")
|
||||
return s or "unnamed"
|
||||
|
||||
|
||||
def _maybe_translit_and_sanitize(s: str, *, translit: bool) -> str:
    """Optionally transliterate RU->LAT, then make the result filesystem-safe."""
    candidate = _transliterate_ru(s) if translit else s
    return _sanitize_fs_component(candidate)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class _ExportContext:
|
||||
"""
|
||||
Контекст выгрузки для форматов HTML/TXT и manifest.
|
||||
"""
|
||||
|
||||
printed_at: datetime
|
||||
print_date: datetime.date
|
||||
export_bar_text: str
|
||||
only_work_deals: bool
|
||||
translit: bool
|
||||
|
||||
|
||||
def _parse_print_date_from_request(request) -> datetime.date:
|
||||
"""
|
||||
Определяет «дату печати» так же, как это обычно делается в печатных формах:
|
||||
берём конец периода, иначе начало, иначе сегодня.
|
||||
"""
|
||||
start_date = (request.GET.get("start_date") or "").strip()
|
||||
end_date = (request.GET.get("end_date") or "").strip()
|
||||
raw = end_date or start_date
|
||||
if raw:
|
||||
try:
|
||||
return datetime.strptime(raw, "%Y-%m-%d").date()
|
||||
except ValueError:
|
||||
return timezone.localdate()
|
||||
return timezone.localdate()
|
||||
|
||||
|
||||
def _fetch_workitems_by_ids(*, workitem_ids: list[int]) -> list[WorkItem]:
|
||||
"""Возвращает список WorkItem по id с нужными связями для рендера/экспорта."""
|
||||
ids = [int(x) for x in (workitem_ids or []) if int(x) > 0]
|
||||
if not ids:
|
||||
return []
|
||||
return list(
|
||||
WorkItem.objects.select_related(
|
||||
'deal',
|
||||
'entity',
|
||||
'entity__planned_material',
|
||||
'operation',
|
||||
'machine',
|
||||
'workshop',
|
||||
)
|
||||
.filter(id__in=ids)
|
||||
.order_by('-date', 'deal__number', 'id')
|
||||
)
|
||||
|
||||
|
||||
def _group_workitems(rows: list[WorkItem]) -> list[dict]:
|
||||
"""
|
||||
Группирует WorkItem так же, как печатная форма:
|
||||
ключ = (цех, станок/пост), значение = список работ.
|
||||
"""
|
||||
groups: dict[tuple[str, str], dict] = {}
|
||||
for wi in rows:
|
||||
ws_label = wi.workshop.name if wi.workshop else "—"
|
||||
m_label = wi.machine.name if wi.machine else ""
|
||||
key = (ws_label, m_label)
|
||||
g = groups.get(key)
|
||||
if not g:
|
||||
g = {"workshop": ws_label, "machine": m_label, "items": []}
|
||||
groups[key] = g
|
||||
g["items"].append(wi)
|
||||
return list(groups.values())
|
||||
|
||||
|
||||
def _render_html(*, request, groups: list[dict], ctx: _ExportContext) -> str:
    """Render the HTML using exactly the same template as the "Print" button."""
    return render_to_string(
        "shiftflow/registry_workitems_print.html",
        {
            "groups": groups,
            "printed_at": ctx.printed_at,
            "print_date": ctx.print_date,
            "export_bar_text": ctx.export_bar_text,
        },
        request=request,
    )
|
||||
|
||||
|
||||
def _render_txt(*, groups: list[dict], ctx: _ExportContext) -> str:
|
||||
"""
|
||||
Формирует TXT-версию выгрузки.
|
||||
|
||||
Принцип:
|
||||
- читаемо в блокноте;
|
||||
- фиксированный порядок колонок;
|
||||
- добавляем длину заготовки, если у сущности нет файла КД и задан blank_length_mm.
|
||||
"""
|
||||
lines: list[str] = []
|
||||
lines.append("СМЕННОЕ ЗАДАНИЕ (выгрузка из MES)")
|
||||
lines.append(f"{ctx.export_bar_text}")
|
||||
if ctx.only_work_deals:
|
||||
lines.append("Ограничение: только сделки в статусе «В работе».")
|
||||
lines.append("")
|
||||
|
||||
for g in groups:
|
||||
ws = (g.get("workshop") or "—").strip()
|
||||
m = (g.get("machine") or "").strip()
|
||||
header = f"Цех: {ws}"
|
||||
if m:
|
||||
header += f" | Станок/пост: {m}"
|
||||
lines.append(header)
|
||||
lines.append("Дата\tСделка\tОперация\tПозиция\tМатериал\tПлан\tФакт")
|
||||
|
||||
for wi in (g.get("items") or []):
|
||||
deal_no = getattr(getattr(wi, "deal", None), "number", "") or "-"
|
||||
op_name = (getattr(getattr(wi, "operation", None), "name", "") or getattr(wi, "stage", "") or "—").strip()
|
||||
|
||||
ent = getattr(wi, "entity", None)
|
||||
dno = (getattr(ent, "drawing_number", "") or "").strip()
|
||||
ename = (getattr(ent, "name", "") or "").strip()
|
||||
pos = f"{dno} {ename}".strip() if dno or ename else "—"
|
||||
|
||||
has_kd = bool(getattr(ent, "dxf_file", None))
|
||||
length_mm = getattr(ent, "blank_length_mm", None)
|
||||
if (not has_kd) and (not dno) and length_mm:
|
||||
pos = f"{pos} | Длина: {int(round(float(length_mm)))} мм"
|
||||
|
||||
mat = getattr(ent, "planned_material", None)
|
||||
mat_label = "—"
|
||||
if mat:
|
||||
mat_label = (getattr(mat, "full_name", "") or getattr(mat, "name", "") or "—").strip()
|
||||
|
||||
dt = wi.date.strftime("%d.%m.%y") if getattr(wi, "date", None) else ""
|
||||
plan = str(int(getattr(wi, "quantity_plan", 0) or 0))
|
||||
fact = str(int(getattr(wi, "quantity_done", 0) or 0))
|
||||
|
||||
lines.append(f"{dt}\t{deal_no}\t{op_name}\t{pos}\t{mat_label}\t{plan}\t{fact}")
|
||||
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines).strip() + "\n"
|
||||
|
||||
|
||||
def _iter_entity_kd_files(rows: list[WorkItem]) -> list[tuple[str, object]]:
|
||||
"""
|
||||
Собирает список файлов КД по всем уникальным сущностям в выгрузке.
|
||||
|
||||
Возвращаем список (kind, file_field):
|
||||
- kind нужен для подсказок/manifest, но имя файла берём из исходного поля.
|
||||
"""
|
||||
seen: set[int] = set()
|
||||
out: list[tuple[str, object]] = []
|
||||
for wi in rows:
|
||||
ent = getattr(wi, "entity", None)
|
||||
if not ent:
|
||||
continue
|
||||
eid = int(getattr(ent, "id", 0) or 0)
|
||||
if eid <= 0 or eid in seen:
|
||||
continue
|
||||
seen.add(eid)
|
||||
|
||||
dxf = getattr(ent, "dxf_file", None)
|
||||
pdf = getattr(ent, "pdf_main", None)
|
||||
if dxf:
|
||||
out.append(("dxf_iges_step", dxf))
|
||||
if pdf:
|
||||
out.append(("pdf", pdf))
|
||||
return out
|
||||
|
||||
|
||||
def _zip_write_filefield(zf: zipfile.ZipFile, arc_path: str, file_field) -> int:
|
||||
"""
|
||||
Записывает FileField в архив.
|
||||
|
||||
Возвращает размер в байтах (для manifest).
|
||||
"""
|
||||
file_field.open("rb")
|
||||
try:
|
||||
data = file_field.read()
|
||||
finally:
|
||||
try:
|
||||
file_field.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
zf.writestr(arc_path, data)
|
||||
return len(data)
|
||||
|
||||
|
||||
def build_workitem_registry_export_zip(
    *,
    request,
    workitem_ids: list[int],
    translit: bool,
    only_work_deals: bool,
) -> tuple[bytes, str]:
    """Main entry point of the shift-task export service.

    Builds the whole offline package in memory (nothing is written to disk)
    and returns:
      - zip_bytes: archive contents,
      - filename: suggested name for the Content-Disposition header.

    Archive layout: shift_task.html (same template as the "Print" button),
    shift_task.txt, manifest.txt, and CAD files under kd/<material>/.

    Args:
        request: Django request; used for the print-date query params and
            template rendering context.
        workitem_ids: ids of the registry rows selected for export.
        translit: transliterate RU folder/file names for hardware/Windows.
        only_work_deals: recorded in TXT/manifest; filtering by deal status
            is assumed to happen in the caller — TODO confirm.

    Raises:
        Re-raises any unexpected exception after logging it; per-CAD-file
        failures are logged and recorded as ERROR manifest entries instead.
    """
    logger.info(
        "fn:start build_workitem_registry_export_zip user_id=%s workitems=%s translit=%s only_work_deals=%s",
        getattr(getattr(request, "user", None), "id", None),
        len(workitem_ids or []),
        int(bool(translit)),
        int(bool(only_work_deals)),
    )

    try:
        printed_at = timezone.now()
        print_date = _parse_print_date_from_request(request)
        export_bar_text = f"Сформировано: {printed_at.strftime('%d.%m.%Y %H:%M')}"

        ctx = _ExportContext(
            printed_at=printed_at,
            print_date=print_date,
            export_bar_text=export_bar_text,
            only_work_deals=bool(only_work_deals),
            translit=bool(translit),
        )

        rows = _fetch_workitems_by_ids(workitem_ids=list(workitem_ids or []))

        groups = _group_workitems(rows)
        html = _render_html(request=request, groups=groups, ctx=ctx)
        txt = _render_txt(groups=groups, ctx=ctx)

        # Timestamped archive name; transliteration only matters if the
        # base name ever contains non-ASCII (kept for symmetry/safety).
        safe_ts = printed_at.strftime("%Y%m%d_%H%M%S")
        base_name = f"shift_task_{safe_ts}"
        if translit:
            base_name = _maybe_translit_and_sanitize(base_name, translit=True)
        zip_filename = f"{base_name}.zip"

        buf = io.BytesIO()
        manifest_lines: list[str] = []
        manifest_lines.append("MES export manifest")
        manifest_lines.append(f"generated_at={printed_at.isoformat()}")
        manifest_lines.append(f"only_work_deals={int(bool(only_work_deals))}")
        manifest_lines.append(f"translit={int(bool(translit))}")
        manifest_lines.append(f"rows={len(rows)}")
        manifest_lines.append(f"groups={len(groups)}")
        manifest_lines.append("")

        with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf:
            html_name = "shift_task.html"
            txt_name = "shift_task.txt"
            man_name = "manifest.txt"

            zf.writestr(html_name, html.encode("utf-8"))
            zf.writestr(txt_name, txt.encode("utf-8"))

            kd_entries: list[str] = []

            # Deduplicate entities and total their planned quantity across
            # the selected rows — the total becomes the " n<qty>" file suffix.
            entities: dict[int, object] = {}
            entity_plan_qty: dict[int, int] = {}

            for wi in rows:
                ent = getattr(wi, 'entity', None)
                if not ent:
                    continue

                eid = int(getattr(ent, 'id', 0) or 0)
                if eid <= 0:
                    continue

                if eid not in entities:
                    entities[eid] = ent

                qty = int(getattr(wi, 'quantity_plan', 0) or 0)
                entity_plan_qty[eid] = int(entity_plan_qty.get(eid, 0) or 0) + max(0, qty)

            for eid, ent in entities.items():
                # CAD files are grouped into folders by planned material.
                mat = getattr(ent, 'planned_material', None)
                mat_label = 'Без_материала'
                if mat:
                    mat_label = (getattr(mat, 'full_name', '') or getattr(mat, 'name', '') or 'Без_материала').strip() or 'Без_материала'

                folder = _maybe_translit_and_sanitize(mat_label, translit=bool(translit))

                # File base name: "<drawing_number> <name>", falling back to
                # a synthetic entity_<id> when both are blank.
                dno = (getattr(ent, 'drawing_number', '') or '').strip()
                ename = (getattr(ent, 'name', '') or '').strip()
                base = f"{dno} {ename}".strip() or f"entity_{getattr(ent, 'id', '')}"
                base = _maybe_translit_and_sanitize(base, translit=bool(translit))

                plan_n = int(entity_plan_qty.get(int(eid), 0) or 0)
                suffix = f" n{plan_n}" if plan_n > 0 else ""

                for ff in [getattr(ent, 'dxf_file', None), getattr(ent, 'pdf_main', None)]:
                    if not ff:
                        continue

                    # Keep the original file extension (lowercased).
                    src_name = (getattr(ff, 'name', '') or '').strip()
                    ext = ''
                    if '.' in src_name:
                        ext = '.' + src_name.split('.')[-1].lower()

                    arc_name = f"{base}{suffix}{ext}"
                    # base/folder are already transliterated above; this pass
                    # only re-sanitizes the combined name.
                    arc_name = _maybe_translit_and_sanitize(arc_name, translit=False)

                    arc_path = f"kd/{folder}/{arc_name}"
                    try:
                        size = _zip_write_filefield(zf, arc_path, ff)
                        kd_entries.append(f"{arc_path}\t{size}")
                    except Exception:
                        # A broken/missing storage file must not kill the
                        # whole export — record it in the manifest instead.
                        logger.exception('fn:error export_kd_file arc_path=%s', arc_path)
                        kd_entries.append(f"{arc_path}\tERROR")

            manifest_lines.append("files:")
            manifest_lines.append(f"- {html_name}")
            manifest_lines.append(f"- {txt_name}")
            manifest_lines.append(f"- {man_name}")
            manifest_lines.append("")
            manifest_lines.append("kd_files:")
            if kd_entries:
                manifest_lines.extend(kd_entries)
            else:
                manifest_lines.append("(none)")

            zf.writestr(man_name, ("\n".join(manifest_lines).strip() + "\n").encode("utf-8"))

        buf.seek(0)
        data = buf.getvalue()

        logger.info(
            "fn:done build_workitem_registry_export_zip bytes=%s rows=%s groups=%s",
            len(data),
            len(rows),
            len(groups),
        )
        return data, zip_filename

    except Exception:
        logger.exception("fn:error build_workitem_registry_export_zip")
        raise
|
||||
Reference in New Issue
Block a user