-
Notifications
You must be signed in to change notification settings - Fork 1
/
EbbHelper.class.php
599 lines (461 loc) · 20.7 KB
/
EbbHelper.class.php
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
<?php
require_once('vendor/autoload.php');
use Google\Cloud\Storage\StorageClient;
use League\Flysystem\Filesystem;
use League\Flysystem\Adapter\Local;
use Superbalist\Flysystem\GoogleStorage\GoogleStorageAdapter;
//TODO, this system should be smart enough to detect a missing bucket and create it dynamically (as nearline class storage)
//So that you did not need to create a new bucket on https://console.cloud.google.com/storage/browser/
//which is required to use this now..
// a couple of low level helpers for downloading stuff..
//made into functions on an object that understands how to work with google files
class EbbHelper {
    /** @var string[] Basenames of every object currently in the bucket (filled by updateCloudFileList()). */
    public $cloud_file_list = [];
    /** @var string The Google Cloud Storage bucket name. */
    public $bucket = '';
    /** @var string The Google Cloud project id. */
    public $projectId = '';
    /** @var string Path to the service-account JSON key file. */
    public $keyFilePath = '';
    /** @var GoogleStorageAdapter Flysystem adapter talking to the bucket. */
    public $adapter;
    /** @var Filesystem Flysystem filesystem wrapped around $adapter. */
    public $filesystem;
    /** @var bool Reserved debug flag (currently unused; downloadFile() has its own local flag). */
    private $is_debug = false;

    /**
     * Wire up the Google Cloud Storage client / Flysystem stack and cache the
     * bucket's current file list.
     *
     * Exits the process (exit(-1)) if the credentials file is missing -- this
     * class is CLI-oriented and treats setup failures as fatal.
     *
     * @param string $projectId   Google Cloud project id
     * @param string $bucket      bucket name (must already exist -- see file-level TODO)
     * @param string $keyFilePath path to the service-account JSON credentials file
     */
    public function __construct($projectId, $bucket, $keyFilePath){
        if(!file_exists($keyFilePath)){
            echo "Fatal Error: $keyFilePath does not exist\n";
            exit(-1);
        }
        $this->projectId = $projectId;
        $this->bucket = $bucket;
        $this->keyFilePath = $keyFilePath;
        putenv("GOOGLE_APPLICATION_CREDENTIALS=$keyFilePath");
        $storageClient = new StorageClient([
            'projectId' => $projectId,
        ]);
        $_ENV['SUPPRESS_GCLOUD_CREDS_WARNING'] = true;
        $bucket = $storageClient->bucket($bucket);
        $this->adapter = new GoogleStorageAdapter($storageClient, $bucket);
        $this->filesystem = new Filesystem($this->adapter);
        $this->updateCloudFileList();
    }

    /**
     * Recursively list every object in the bucket and append each basename to
     * $this->cloud_file_list, which later acts as a "does this file already
     * exist in the cloud?" lookup for the mirror functions.
     *
     * @return void
     */
    public function updateCloudFileList(){
        $is_recursive = true;
        $dir_contents = $this->filesystem->listContents('/', $is_recursive);
        foreach($dir_contents as $object){
            //echo $object['basename'].' is located at '.$object['path'].' and is a '.$object['type']."\n";
            $this->cloud_file_list[] = $object['basename'];
        }
    }

    /*
    Lets you download the latest version of a file in any sub-path (in the bucket and underneath the prefix)
    to a local directory of your preference..
    if the downloaded file is a zip/tar/gzip/etc file... it will extract the contents to the directory you specified.
    This understands the cloud files; if you have multiple types of files in the sub-dir you specify..
    It will download the latest version of all of the files..
    arguments:
        $sub_path - the sub-directory (underneath the bucket and prefix) that you want to download from
        $local_dir - the local subdirectory..
    returns
        false on fail..
        current file name on success...
    */
    public function downloadLatestMirror($sub_path, $local_dir){
        $sub_path = rtrim($sub_path, '/').'/';
        echo "Mirroring cloud file in $sub_path to $local_dir\n";
        $local_FS = new Filesystem(new Local($local_dir));
        $is_recursive = true;
        $dir_contents = $this->filesystem->listContents($sub_path, $is_recursive);
        $latest_date = '';
        $current_file = '';
        $current_basename = '';
        $current_filedata = [];
        foreach($dir_contents as $object){
            $full_path = $object['path'];
            $basename = $object['basename'];
            if($this->filesystem->has($full_path)){
                $file_data = self::parseCloudFile($basename);
                $date = $file_data['date'];
                //Y-m-d dates compare correctly as strings, so plain < finds the newest
                if($latest_date < $date){
                    $current_file = $full_path;
                    $current_basename = $basename; //BUGFIX: was misspelled $current_basemame and never used below
                    $current_filedata = $file_data;
                    $latest_date = $date;
                }
            }else{
                echo "Error: lookup for $full_path failed\n";
                exit(-1);
            }
        }
        if($latest_date != ''){
            //BUGFIX: report $latest_date; previously this echoed $date, the last loop iteration's value
            echo "The latest date was $latest_date\n";
            echo "Cloning $current_file to $local_dir \n";
            $contents = $this->filesystem->read($current_file);
            //BUGFIX: save under the latest file's basename; previously $basename (whatever the
            //foreach ended on) was used, so with multiple files the newest content could be
            //written under the wrong name
            $local_FS->put($current_basename, $contents);
            $full_path = "$local_dir/$current_basename";
            $file_name = pathinfo($full_path, PATHINFO_FILENAME);
            //NOTE(review): these commands interpolate file/dir names straight into a shell;
            //safe only because names come from our own cloud naming scheme -- do not feed
            //untrusted paths in here without escapeshellarg().
            $extension_to_extract_map = [
                'tar'    => "tar -xvf $full_path -C $local_dir",
                'tgz'    => "tar -xzvf $full_path -C $local_dir",
                'tar.gz' => "tar -xzvf $full_path -C $local_dir",
                'zip'    => "unzip $full_path -d $local_dir", //BUGFIX: -d = extract-to-dir; -f meant "freshen" and extracted nothing new
                'gzip'   => "gzip -dkc < $full_path > $local_dir/$file_name",
            ];
            $current_extension = $current_filedata['extension'];
            if(isset($extension_to_extract_map[$current_extension])){
                //then this is a compressed file and we should uncompress it.
                $cmd = $extension_to_extract_map[$current_extension];
                echo "Using $cmd to decompress file\n";
                system($cmd);
            }
            return($full_path);
        }else{
            echo "Error: I found no file under $sub_path\n";
            //not exiting.. might want to keep on rolling..
            return(false);
        }
    }

    /*
    given a cloudfile name return the basic name, md5 and date as array
    expects the naming scheme produced by calculate_cloud_file_name():
        original_name.MD5.Y-m-d.extension   (tar.gz counts as one extension)
    */
    public static function parseCloudFile($cloud_file){
        $file_array = explode('.', $cloud_file);
        if(count($file_array) < 4){
            echo "Error: A cloud file needs at least 4 segments $cloud_file fails\n";
            exit(-1);
        }
        $extension = array_pop($file_array);
        $date = array_pop($file_array);
        //BUGFIX: this was "$extension = 'gz'" (assignment), which clobbered the real
        //extension to "tar.gz" whenever the second-to-last segment was "tar"
        if($date == 'tar' && $extension == 'gz'){
            $extension = "tar.gz";
            $date = array_pop($file_array);
        }
        $md5 = array_pop($file_array);
        //BUGFIX: rejoin with '.' -- implode() with no glue mangled names that contain dots
        $name = implode('.', $file_array);
        $return_me = [
            'name' => $name,
            'date' => $date,
            'md5' => $md5,
            'extension' => $extension,
        ];
        return($return_me);
    }

    /*
    Downloads a file from the web to a local subdirectory.. and automatically uploads the same file up the cloud
        $sub_dir = the location of the local working directory
        $url = the url to download and then subsequently mirror
        $filename = the filename to use when uploading the file, if not set defaults to the basename from pathinfo()
        $is_use_cloud_name = should we add a md5 to the filename that we upload to the cloud.. defaults to true, set this to false to make the "current" version of files...
    return integer compatible with the overall EBB return strategy
        -1 for an error
        0 for file has not changed
        1 for new file with new data
    */
    public function mirror_that($sub_dir, $url, $filename = null, $is_use_cloud_name = true){
        if(is_null($url)){
            echo "Error: you tried to run mirror_that with a null instead of a url\n";
            exit(-1);
        }
        if(is_array($url)){
            echo "Error: you tried to run mirror_that with an array instead of a url...\n";
            var_export($url);
            echo "Error: you tried to run mirror_that with an array instead of a url...\n";
            exit(-1);
        }
        if(filter_var($url, FILTER_VALIDATE_URL) === FALSE){
            echo "$url is Not a valid URL\n";
            exit(-1);
        }
        if(is_null($filename)){
            //lets try to calculate it from the url...
            $parseurl = parse_url($url);
            $path = $parseurl['path'];
            $pathinfo = pathinfo($path);
            $filename = $pathinfo['basename'];
        }
        $local_tmp_file = __DIR__ . "/data/$filename"; //this is where we put the local copy...
        $is_downloaded = self::downloadFile($url, $local_tmp_file); //call our curl download function, which will save the file into the local copy
        if($is_downloaded){
            $local_cloud_file = $this->rename_local_file_to_cloud_version($local_tmp_file); //get a version of the file that is dated with an md5 string
            if($is_use_cloud_name){
                $cloud_file_name = pathinfo($local_cloud_file, PATHINFO_BASENAME);
            }else{
                $cloud_file_name = $filename; //just use the name as written
            }
            //strict comparison: the list is all strings, avoid loose-typing surprises
            if(!in_array($cloud_file_name, $this->cloud_file_list, true)){
                //this means we have at least one new data file..
                $is_new_data = true;
                echo "UPLOADING: \t$filename to $cloud_file_name from $local_cloud_file!!\n";
                $cloud_file_contents = file_get_contents($local_cloud_file); //load the data into php memory...
                $cloud_file_path = "$sub_dir/$cloud_file_name";
                $this->filesystem->write($cloud_file_path, $cloud_file_contents); //save the file to google cloud.
                return(1);
            }else{
                echo "REDUNDANT: \tThe $filename is redundant to a file already in the cloud... doing nothing...\n";
                return(0);
            }
        }else{
            echo "Could not download $url\n";
            return(-1);
        }
    }

    /*
    Works exactly like mirror_that, except it understands specifically how to work with the socrata API to
    check the file integrity etc etc..
    it knows how to check its arguments for validity and then call the downloadSocrataFile instead of downloadFile
    which will run additional checks to make sure things work correctly...
        $sub_dir - the subdirectory of the cloud bucket to use
        $base_url - the socrata base url...
        $socrata_four_by_four - the socrata four by four
        $filename_stub - the filename to use as the prefix for all files gooddataset will result in gooddataset.four-four.MD5.date.zip etc etc..
        $is_use_cloud_name - should we put an md5 and/or date in the new name of the file..
    returns -1 error / 0 unchanged / 1 new data (same convention as mirror_that)
    */
    public function mirror_that_socrata_id($sub_dir, $base_url, $socrata_four_by_four, $filename_stub, $is_use_cloud_name = true){
        if(is_null($base_url)){
            echo "Error: you tried to run mirror_that with a null instead of a base_url\n";
            exit(-1);
        }
        if(is_null($socrata_four_by_four)){
            echo "Error: you tried to run mirror_that_socrata_url with a null instead of a socrata four by four\n";
            exit(-1);
        }
        if(is_array($base_url)){
            echo "Error: you tried to run mirror_that_socrata_url with an array instead of a url...\n";
            var_export($base_url);
            echo "Error: you tried to run mirror_that_socrata_url with an array instead of a url...\n";
            exit(-1);
        }
        if(filter_var($base_url, FILTER_VALIDATE_URL) === FALSE){
            echo "base_url $base_url is Not a valid URL\n";
            exit(-1);
        }
        $local_tmp_file = __DIR__ . "/data/$filename_stub.$socrata_four_by_four.tgz";
        $local_cloud_file = $this->downloadSocrataFile($base_url, $socrata_four_by_four, $filename_stub, $local_tmp_file); //call our curl download function, which will save the file into the local copy
        if($local_cloud_file){
            $cloud_file_name = pathinfo($local_cloud_file, PATHINFO_BASENAME);
            if(!in_array($cloud_file_name, $this->cloud_file_list, true)){
                //this means we have at least one new data file..
                $is_new_data = true;
                echo "UPLOADING: \t$cloud_file_name from $local_cloud_file!!\n";
                $cloud_file_contents = file_get_contents($local_cloud_file); //load the data into php memory...
                $cloud_file_path = "$sub_dir/$cloud_file_name";
                $this->filesystem->write($cloud_file_path, $cloud_file_contents); //save the file to google cloud.
                return(1);
            }else{
                echo "REDUNDANT: \tThe $cloud_file_name is redundant to a file already in the cloud... doing nothing...\n";
                return(0);
            }
        }else{
            //BUGFIX: this echoed an undefined $url variable; the argument here is $base_url
            echo "Could not download $base_url\n";
            return(-1);
        }
    }

    //accepts a local file name like something.zip and copies it to a new file named with the
    //calculate_cloud_file_name function and erases the old file...
    //returns the new full path, or exits the process on any failure.
    public function rename_local_file_to_cloud_version($local_file){
        if(!file_exists($local_file)){
            echo "Error: trying to run rename_local_file_to_cloud_version function on $local_file and it does not exist\n";
            exit(-1);
        }
        if(is_dir($local_file)){
            echo "Error: trying to run rename_local_file_to_cloud_version function on $local_file and it is a directory, this does not work on directories\n";
            exit(-1);
        }
        $cloud_file_name = $this->calculate_cloud_file_name($local_file);
        $pathinfo = pathinfo($local_file);
        $dirname = $pathinfo['dirname'];
        $new_full_path = "$dirname/$cloud_file_name";
        rename($local_file, $new_full_path);
        if(file_exists($new_full_path)){
            return($new_full_path);
        }else{
            echo "Error: I tried to move $local_file to $new_full_path in rename_local_file_to_cloud_version() and the new file does not exist\n";
            exit(-1);
        }
    }

    //we need a reliable way to translate a file name into a dated and md5ed version of itself.
    //this function handles that file name transition...
    //something.ASDFAERWADASDADSAD.2001-03-10.zip where
    //the form is original_file_name.MD5SumOfFile.todaysMySQLFormatDate.original_file_type
    //arguments:
    //  $local_file - the file to build a cloud name for (its contents are md5ed unless $md5_arg is given)
    //  $md5_arg - optional pre-computed md5 (used for archives whose md5 is computed from their members)
    //  $date_string_arg - optional Y-m-d date to use instead of today
    public function calculate_cloud_file_name($local_file, $md5_arg = null, $date_string_arg = null){
        $pathinfo = pathinfo($local_file);
        $file_name_first_part = $pathinfo['filename'];
        $file_extension = $pathinfo['extension'];
        if(is_null($md5_arg)){
            $my_md5 = md5_file($local_file);
        }else{
            $my_md5 = $md5_arg; //having an argument allows us to have a different method for calculating the md5 of zip files etc.
        }
        if(is_null($date_string_arg)){
            $mysql_today_datestring = date("Y-m-d");
        }else{
            $mysql_today_datestring = $date_string_arg; //not actually sure why you would need this.. but.
        }
        $new_last_name = "$my_md5.$mysql_today_datestring.$file_extension";
        //we technically do not know how many characters this is, because $file_extension could be very long...
        $max_total_string_len = 500; //this will keep us well within the 1024 byte name limit for google files
        //https://cloud.google.com/storage/docs/naming
        $tail_strlen = strlen($new_last_name);
        $left_over_space = $max_total_string_len - $tail_strlen;
        //truncate the original name if the whole thing would blow the length budget
        if(strlen($file_name_first_part) > $left_over_space){
            $new_first_name = substr($file_name_first_part, 0, $left_over_space);
        }else{
            $new_first_name = $file_name_first_part;
        }
        $cloud_file_name = "$new_first_name.$new_last_name";
        return($cloud_file_name);
    }

    /*
    Is essentially identical to downloadFile.. except that it understands how to download Socrata meta data
    as well as csv, and store both the metadata and the csv file into a tar file
    and make sure that a basic sense check on the data has occured..
    implemented as multiple calls back to the original downloadFile()
    arguments
        $base_url - the socrata base url (something like https://data.cms.gov/)
        $socrata_four_by_four - the dataset identifier like je57-c47h in https://data.cms.gov/Medicare-Enrollment/Address-Sub-File/je57-c47h
        $filename_stub - the prefix for all of the downloaded component files
        $target_tar_file - where to put the tar file that is built from downloaded files...
    returns
        the real tar file path (includes an md5 and date)
    */
    public function downloadSocrataFile($base_url, $socrata_four_by_four, $filename_stub, $target_tar_file){
        //we are going to be building a tar file...
        //and we want to put all of the component files into the same working directory...
        //we use pathinfo to figure out where that is.
        $pathinfo = pathinfo($target_tar_file);
        $out_dir = $pathinfo['dirname'] . "/"; //this is where we will put all of the files we download before we tar them up...
        $just_tar_file = $pathinfo['basename'];
        $base_url = rtrim($base_url, '/'); //its easier to remove it and then add it back...
        $metadata_url = "$base_url/api/views/$socrata_four_by_four.json";
        $csv_download_url = "$base_url/api/views/$socrata_four_by_four/rows.csv?accessType=DOWNLOAD";
        $row_count_url = "$base_url/api/id/$socrata_four_by_four?\$query=select%20count(*)%20as%20COLUMN_ALIAS_GUARD__count";
        $metadata_filename = "$out_dir$filename_stub.$socrata_four_by_four.metadata.json";
        $rowcount_filename = "$out_dir$filename_stub.$socrata_four_by_four.rowcount.json";
        $data_filename = "$out_dir$filename_stub.$socrata_four_by_four.data.csv";
        //sometimes we want to do exactly the same thing to all of these files... so lets make a list to loop over..
        $to_download_list = [
            $metadata_filename => $metadata_url,
            $rowcount_filename => $row_count_url,
            $data_filename => $csv_download_url,
        ];
        foreach($to_download_list as $filename => $url){
            self::downloadFile($url, $filename);
        }
        //lets get the row count from the api. This is the entire reason we get this file..
        $row_count_data = json_decode(file_get_contents($rowcount_filename), true);
        if(isset($row_count_data[0]['column_alias_guard__count'])){
            $api_row_count = $row_count_data[0]['column_alias_guard__count'];
            //ok now we know how many rows the API thinks there are!!
        }else{
            echo "Error: could not parse $rowcount_filename to get the row count information\n";
            exit(-1);
        }
        //how many lines are there actually. Nothing is faster for this than the wc command...
        $wc_cmd = "wc -l $data_filename";
        $wc_result = exec($wc_cmd);
        list($real_row_count, $trash) = explode(' ', $wc_result);
        //the csv file has column headers, and should be exactly one row larger than the row count from the socrata API.
        if($real_row_count == ($api_row_count + 1)){
            //then we are good!!
        }else{
            echo "Error: We got $api_row_count from the API, and $real_row_count from the downloaded csv file... something went wrong...\n";
            exit(-1);
        }
        //ok we now have all 3 files downloaded... we need to archive them...
        //NOTE: ZipArchive cannot produce byte-identical archives across runs (no -X equivalent),
        //so identical contents would get different md5s; plain gzip has the same problem
        //(https://serverfault.com/a/775740/72025). Even the reproducible-archive recipe at
        //https://reproducible-builds.org/docs/archives/ only matched briefly in testing --
        //documented here to keep future developers (i.e. myself) from rabbit holing on this.
        //That is why the archive md5 below is computed from the member files, not the tar itself.
        //this command should not need me to change directory to the outdir..
        $tar_cmd = "tar -C $out_dir -czf $just_tar_file";
        //but 'should' is a terrible terrible word
        chdir($out_dir);
        $merged_md5_string = '';
        foreach($to_download_list as $full_filename => $url){
            $filename = pathinfo($full_filename, PATHINFO_BASENAME);
            $tar_cmd .= " $filename";
            $this_md5_string = md5_file($full_filename);
            // echo "Got $this_md5_string md5 for $filename\n";
            $merged_md5_string .= $this_md5_string;
        }
        //echo "Tarring with $tar_cmd\n";
        system($tar_cmd);
        chdir(__DIR__); //go back
        //the cloud name's md5 is the md5 of the concatenated member md5s, which IS stable across runs
        $meta_md5 = md5($merged_md5_string);
        $cloud_file_name = $this->calculate_cloud_file_name($target_tar_file, $meta_md5);
        //echo "Merging $merged_md5_string into $meta_md5 for $cloud_file_name\n";
        $local_cloud_file = $out_dir . $cloud_file_name; //$out_dir already ends in '/'
        rename($target_tar_file, $local_cloud_file);
        //if we get all the way here then we have built the tar file correctly...
        return($local_cloud_file);
    }

    /*
    // modified from https://stackoverflow.com/a/35271138/144364
    //accepts a url to download, and a local file path to save the data to...
    //returns true if the file exists and it is not a 404 result...
    arguments:
        $url - the url to download
        $filepath - the local file to store it in..
    */
    public static function downloadFile($url, $filepath){
        $is_debug = false;
        if($is_debug){
            if(file_exists($filepath)){
                //in debug mode, we just assume that if the file already exists... then it is the current download...
                echo "Warning!! in debug mode, not downloading $url because $filepath already exists..\n";
                return(true);
            }
        }
        //echo "Downloading $url...";
        $fp = fopen($filepath, 'w+');
        if($fp){
            $ch = curl_init($url);
            curl_setopt($ch, CURLOPT_RETURNTRANSFER, false);
            curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false); //NOTE(review): disables TLS verification -- confirm this is intentional
            curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 30);
            curl_setopt($ch, CURLOPT_TIMEOUT, 0); //0 is infinite.
            curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
            curl_setopt($ch, CURLOPT_FILE, $fp);
            curl_exec($ch);
            $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
            if($httpCode == 404){
                //BUGFIX: release the curl handle and file pointer before bailing out;
                //previously both leaked on every 404
                curl_close($ch);
                fclose($fp);
                echo "Error: got 404\n";
                return(false);
            }
            curl_close($ch);
            fclose($fp);
            if(filesize($filepath) > 0){
                //echo "done.\n";
                return(true);
            }else{
                echo "Error: download file size was zero trying to save $url to $filepath \n";
                return(false);
            }
        }else{
            //got a false file pointer here...
            echo "Error: Failed to open $filepath for writing...\n";
            exit(-1);
        }
    }
}