
Commit 582e294

Updated logging of the id field
1 parent 080c7c4 commit 582e294

File tree

2 files changed: 19 additions & 4 deletions

source/merge-s3-files/app.py

Lines changed: 10 additions & 1 deletion

@@ -14,7 +14,7 @@ def lambda_handler(event, context):
         bucket = event['bucket']
         key = event['key']
         to_process_folder = event['toProcessFolder']
-
+        data = []
         output_path = to_process_folder.replace("to_process", "output")
 
         output = []
@@ -63,6 +63,15 @@ def lambda_handler(event, context):
                                    Key=s3_target_key,
                                    Body=output_body)
 
+        line_num = 0
+        lines = output_body.splitlines();
+        for line in lines:
+            words = line.split(",")
+            if line_num > 0:
+                data.append(words[0])
+            line_num += 1
+
+        logger.info("Data", input_file=key, data=data)
         return {"response": response, "S3OutputFileName": s3_target_key, "originalFileName": key}
 
     except Exception as e:
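
For context, the id-collection loop added above can be read in isolation as the sketch below. This is a minimal reconstruction, not code from the repository: the helper name extract_ids is hypothetical, and it assumes output_body is a CSV string whose first row is a header and whose first column holds the id being logged.

# Hypothetical helper mirroring the loop added in this hunk.
# Assumes the first row of output_body is a header and column 0 is the id.
def extract_ids(output_body):
    data = []
    line_num = 0
    for line in output_body.splitlines():
        words = line.split(",")
        if line_num > 0:              # skip the header row
            data.append(words[0])     # first column is treated as the id
        line_num += 1
    return data

# Example: extract_ids("id,name\n1,alpha\n2,beta") returns ['1', '2']

The logger.info("Data", input_file=key, data=data) call passes extra fields as keyword arguments, which implies a structured logger (for example aws_lambda_powertools' Logger or structlog) rather than the standard-library logging module; the diff itself does not show which one is configured.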

source/split-ip-file/app.py

Lines changed: 9 additions & 3 deletions

@@ -40,7 +40,11 @@ def lambda_handler(event, context):
     # Number of files to be created
     num_files = file_count(s3.open(input_file, 'r'), file_delimiter, file_row_limit)
     # Split the input file into several files, each with the number of records mentioned in the fileChunkSize parameter.
-    splitFileNames = split(s3.open(input_file, 'r'), file_delimiter, file_row_limit, output_file_template,
+    splitFileNames = split(input_file,
+                           s3.open(input_file, 'r'),
+                           file_delimiter,
+                           file_row_limit,
+                           output_file_template,
                            output_path, True,
                            num_files)
     # Archive the input file.
@@ -65,11 +69,11 @@ def file_count(file_handler, delimiter, row_limit):
 
 
 # Split the input into several smaller files.
-def split(filehandler, delimiter, row_limit, output_name_template, output_path, keep_headers, num_files):
+def split(input_file, filehandler, delimiter, row_limit, output_name_template, output_path, keep_headers, num_files):
     import csv
     reader = csv.reader(filehandler, delimiter=delimiter)
     split_file_path = []
-
+    data = []
     current_piece = 1
     current_out_path = os.path.join(
         output_path,
@@ -94,6 +98,8 @@ def split(filehandler, delimiter, row_limit, output_name_template, output_path,
             if keep_headers:
                 current_out_writer.writerow(headers)
         current_out_writer.writerow(row)
+        data.append(row[0])
+    logger.info("Data", input_file=input_file, data=data)
     return split_file_path
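
The split-ip-file change threads the original input_file name through to split() solely so it can appear in the structured log record alongside the collected ids. A condensed standalone sketch of that behaviour is below; collect_ids is a hypothetical name, and the sketch assumes, as split() appears to, that the header row is consumed before the main loop and that column 0 of each row is the id.

import csv
import io

# Hypothetical helper mirroring the data.append(row[0]) / logger.info pattern
# added to split(). Assumes the first row is a header and column 0 is the id.
def collect_ids(input_file, filehandler, delimiter=','):
    reader = csv.reader(filehandler, delimiter=delimiter)
    next(reader, None)                        # skip the header row
    data = [row[0] for row in reader if row]  # first column of every data row
    # In the commit the list is logged once per input file, e.g.:
    # logger.info("Data", input_file=input_file, data=data)
    return data

# Example:
# collect_ids("to_process/input.csv", io.StringIO("id,ip\n7,10.0.0.1\n8,10.0.0.2"))
# returns ['7', '8']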


0 commit comments
