1
+
1
2
#!/usr/bin/env perl
2
3
#
3
4
# Google Drive direct download of big files
4
5
# ./gdown.pl 'gdrive file url' ['desired file name']
5
6
#
6
7
# v1.0 by circulosmeos 04-2014.
7
8
# v1.1 by circulosmeos 01-2017.
8
- # v1.2, v1.3, v1.4 by circulosmeos 01-2019, 02 -2019.
9
+ # v1.2, 2.0 by circulosmeos 01-2019.
9
10
# //circulosmeos.wordpress.com/2014/04/12/google-drive-direct-download-of-big-files
10
11
# Distributed under GPL 3 (//www.gnu.org/licenses/gpl-3.0.html)
11
12
#
22
23
# Usage check: the gdrive file URL (first argument) is mandatory.
die "\n./gdown.pl 'gdrive file url' [desired file name]\n\n" if $URL eq '';

# Optional second argument: the desired destination file name.
my $FILENAME = shift;

# Unique temporary download name: gdown.<timestamp>.<random digits>.
# The real content is moved/resumed into $FILENAME only once the server
# response proves it is not an HTML interstitial page (see execute_command).
my $TEMP_FILENAME = 'gdown.' . strftime("%Y%m%d%H%M%S", localtime) . '.' . substr(rand, 2);
26
27
27
28
if ($URL =~m #^https ?://drive . google.com/file/d/([^/]+)#) {
28
29
$URL="https://docs.google.com/uc?id=$1&export=download";
33
34
34
35
execute_command ();
35
36
36
- while (-s $FILENAME < 100000) { # only if the file isn't the download yet
37
- open fFILENAME, ' <' , $FILENAME ;
37
+ while (-s $TEMP_FILENAME < 100000 ) { # only if the file isn 't the download yet
38
+ open fFILENAME , '<' , $TEMP_FILENAME ;
38
39
$check =0 ;
39
40
foreach (<fFILENAME >) {
40
41
if (/href ="(\/uc\?export=download[^" ]+)/) {
63
64
$URL =~s /confirm =([^;&]+)/confirm =$confirm / if $confirm ne '' ;
64
65
65
66
execute_command();
67
+
66
68
}
67
69
68
70
unlink $TEMP ;
69
71
70
72
# Run wget against $URL using the cookie jar $TEMP.
#
# By default the page is downloaded to $TEMP_FILENAME (the caller inspects
# it for Google's "confirm" interstitial). When the user supplied a
# $FILENAME, a preliminary --spider request checks the server headers:
# if the Content-Type is not text/html, the response is the real payload,
# so the download is resumed (-c) directly into $FILENAME and the script
# exits cleanly once wget finishes.
#
# Globals read: $URL, $TEMP, $TEMP_FILENAME, $FILENAME.
# Returns 1 when the caller should keep iterating; dies on user
# interruption (Ctrl+C) or on completed content download.
sub execute_command {
    my $OUTPUT_FILENAME = $TEMP_FILENAME;
    my $CONTINUE = '';

    # Check contents before download; if a $FILENAME has been indicated,
    # resume the content download into it.
    # NOTE: for this to work, wget must honor --spider together with
    # --server-response (-S), printing the headers on stderr (hence 2>&1).
    if ( length($FILENAME) > 0 ) {
        my $COMMAND = "wget -q -S --no-check-certificate --spider --load-cookie $TEMP --save-cookie $TEMP \"$URL\" 2>&1";
        my @HEADERS = `$COMMAND`;
        foreach my $header (@HEADERS) {
            if ( $header =~ /Content-Type: (.+)/ ) {
                # Anything other than text/html is the actual file content.
                if ( $1 !~ 'text/html' ) {
                    $OUTPUT_FILENAME = $FILENAME;
                    $CONTINUE = '-c';
                }
            }
        }
    }

    my $COMMAND = "wget $CONTINUE --progress=dot:giga --no-check-certificate --load-cookie $TEMP --save-cookie $TEMP \"$URL\"";
    # Always write to $OUTPUT_FILENAME: either the temp page the caller
    # inspects, or the user's $FILENAME when real content was detected.
    $COMMAND .= " -O \"$OUTPUT_FILENAME\"";

    my $OUTPUT = system( $COMMAND );
    if ( $OUTPUT == 2 ) {    # SIGINT in $?: do a clean exit on Ctrl+C
        unlink $TEMP;
        die "\nDownloading interrupted by user\n\n";
    } elsif ( $OUTPUT == 0 && length($CONTINUE) > 0 ) {
        # Content download into $FILENAME finished: clean exit.
        unlink $TEMP;
        die "\nDownloading complete\n\n";
    }

    return 1;
}
0 commit comments