import argparse
import sys
# Make the project package importable when this script is run directly.
sys.path.append(r"C:\Users\FCI\Desktop\engineero_ai\pg-clean-search")
from app.cleaning.pipeline import run_cleaning_from_yaml
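
# Example invocations (illustrative only; the filename clean_cli.py is an
# assumption, and paths should be adapted to your checkout):
#   python clean_cli.py --cfg scripts/clean_test.yaml
#   python clean_cli.py --cfg scripts/clean_test.yaml --batch 500 --clean-all
#   python clean_cli.py --cfg scripts/clean_test.yaml --clean-cap 20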

def main():
    """Defines the command-line interface for the cleaning pipeline."""
    p = argparse.ArgumentParser(
        description="Run an in-place data cleaning pipeline on specified PostgreSQL tables.",
        formatter_class=argparse.RawTextHelpFormatter
    )
    p.add_argument(
        "--cfg",
        default="scripts/clean_test.yaml",
        help="YAML file listing the source schema, table names, primary keys, and culprit columns."
    )
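    # A hypothetical layout for the --cfg file, inferred only from the help text
    # above; the exact keys expected by run_cleaning_from_yaml are an assumption:
    #   source_schema: public
    #   tables:
    #     - name: customers
    #       primary_key: id
    #       culprit_columns: [email, phone]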
    p.add_argument(
        "--batch",
        type=int,
        default=1000,
        help=(
            "Without --clean-all, the maximum number of rows read (LIMIT) and processed as a single batch.\n"
            "With --clean-all, the chunk size used when reading and writing successive batches."
        )
    )
    p.add_argument(
        "--clean-all", 
        action="store_true", 
        help="Process and clean every row in the table (overrides the LIMIT set by --batch)."
    )
    p.add_argument(
        "--clean-cap",
        type=int,
        help="Maximum number of rows to clean per table."
    )
    
    args = p.parse_args()
    
    run_cleaning_from_yaml(
        args.cfg,
        batch_size=args.batch,
        clean_all=args.clean_all,
        clean_cap=args.clean_cap,  # forward the per-table cleaning cap to the pipeline
    )

if __name__ == "__main__":
    main()