feat: 初始化零工后端代码

This commit is contained in:
Daniel
2026-04-01 14:19:25 +08:00
parent c6fabe262c
commit 84f8be7c0e
41 changed files with 2813 additions and 147 deletions

View File

@@ -10,8 +10,10 @@ from app.domain.schemas import JobCard, MatchBreakdown, MatchResult, QueryFilter
from app.repositories.job_repository import JobRepository
from app.repositories.match_repository import MatchRepository
from app.repositories.worker_repository import WorkerRepository
from app.services.cache_service import get_match_cache
from app.services.card_mapper import job_to_card, worker_to_card
from app.services.rag.lightrag_adapter import LightRAGAdapter
from app.services.weight_service import MatchWeightService
from app.utils.ids import generate_id
@@ -23,9 +25,16 @@ class MatchingService:
self.workers = WorkerRepository(db)
self.matches = MatchRepository(db)
self.rag = LightRAGAdapter(self.settings)
self.weight_service = MatchWeightService(self.settings)
self.cache = get_match_cache()
def match_workers(self, source: JobCard, top_n: int) -> list[MatchResult]:
logger.info("match_workers source_id=%s top_n=%s", source.job_id, top_n)
cache_key = f"match_workers:{source.job_id}:{top_n}"
if self.settings.match_cache_enabled:
cached = self.cache.get(cache_key)
if cached is not None:
return self._parse_cached_matches(cached)
query_text = " ".join([source.title, source.category, source.city, source.region, *source.skills, *source.tags])
candidate_ids = self.rag.search(
query_text=query_text,
@@ -36,10 +45,17 @@ class MatchingService:
results = [self._build_job_to_worker_match(source, worker_to_card(worker)) for worker in candidates]
results = sorted(results, key=lambda item: item.match_score, reverse=True)[:top_n]
self.matches.bulk_replace(results, SourceType.job_to_worker.value, source.job_id)
if self.settings.match_cache_enabled:
self.cache.set(cache_key, [item.model_dump(mode="json") for item in results])
return results
def match_jobs(self, source: WorkerCard, top_n: int) -> list[MatchResult]:
logger.info("match_jobs source_id=%s top_n=%s", source.worker_id, top_n)
cache_key = f"match_jobs:{source.worker_id}:{top_n}"
if self.settings.match_cache_enabled:
cached = self.cache.get(cache_key)
if cached is not None:
return self._parse_cached_matches(cached)
query_text = " ".join([source.name, *source.cities, *source.regions, *[item.name for item in source.skills], *source.experience_tags])
city = source.cities[0] if source.cities else None
candidate_ids = self.rag.search(
@@ -51,6 +67,8 @@ class MatchingService:
results = [self._build_worker_to_job_match(source, job_to_card(job)) for job in candidates]
results = sorted(results, key=lambda item: item.match_score, reverse=True)[:top_n]
self.matches.bulk_replace(results, SourceType.worker_to_job.value, source.worker_id)
if self.settings.match_cache_enabled:
self.cache.set(cache_key, [item.model_dump(mode="json") for item in results])
return results
def explain(self, match_id: str) -> MatchResult | None:
@@ -61,6 +79,20 @@ class MatchingService:
return match_record_to_schema(record)
def feedback(self, match_id: str, accepted: bool) -> dict[str, float] | None:
    """Apply user feedback for a match and return the resulting weights.

    Returns ``None`` when ``match_id`` is unknown. When ranking learning
    is enabled the weights are updated from the match breakdown;
    otherwise the current weights are returned unchanged.
    """
    record = self.matches.get(match_id)
    if record is None:
        return None
    # Deferred import kept as in the original; presumably avoids a
    # circular import with app.services.card_mapper — TODO confirm.
    from app.services.card_mapper import match_record_to_schema

    schema = match_record_to_schema(record)
    if not self.settings.ranking_learning_enabled:
        return self.weight_service.get_weights()
    return self.weight_service.update_from_feedback(schema.breakdown, accepted)
def current_weights(self) -> dict[str, float]:
    """Expose the live scoring weights held by the weight service."""
    weights = self.weight_service.get_weights()
    return weights
def _build_job_to_worker_match(self, job: JobCard, worker: WorkerCard) -> MatchResult:
job_skills = set(job.skills)
expanded_skills = self.rag.expand_skills(job.skills)
@@ -143,13 +175,14 @@ class MatchingService:
experience_score: float,
reliability_score: float,
) -> float:
return (
self.settings.score_skill_weight * skill_score
+ self.settings.score_region_weight * region_score
+ self.settings.score_time_weight * time_score
+ self.settings.score_experience_weight * experience_score
+ self.settings.score_reliability_weight * reliability_score
breakdown = MatchBreakdown(
skill_score=skill_score,
region_score=region_score,
time_score=time_score,
experience_score=experience_score,
reliability_score=reliability_score,
)
return self.weight_service.score(breakdown)
def _build_reasons(
self,
@@ -176,3 +209,10 @@ class MatchingService:
while len(reasons) < 3:
reasons.append("岗位需求与候选画像存在基础匹配")
return reasons[:5]
def _parse_cached_matches(self, cached: object) -> list[MatchResult]:
    """Rehydrate a cached payload into a list of MatchResult objects.

    The cache may hold either live ``MatchResult`` instances (in-process
    cache) or their JSON dumps produced via ``model_dump(mode="json")``
    (external cache). Any other payload shape — including a non-list —
    yields an empty list so a corrupt cache entry behaves like a miss.
    """
    if not isinstance(cached, list):
        return []
    if cached and isinstance(cached[0], MatchResult):
        # First element already a model instance: assume a homogeneous
        # list, as entries are written as one kind per cache set.
        return cached
    # model_validate (pydantic v2) accepts dicts *and* model instances,
    # so a mixed entry no longer raises TypeError as **item unpacking did.
    return [MatchResult.model_validate(item) for item in cached]