Jump to content

Need to transfer files from http url to ftp folder or local folder


danjapro

Recommended Posts

 

I got this script:

 

But it give me error, file_get_contents cannot open stream.

 

I need to add the FTP connection with user/pass paramaters.

 

then look in set http url, to get the file contents(images) and transfer to ftp server location.

 

Can Anyone take alook and tell me if I am going down the right path and how to get there. Please

 

function postToHost($host, $port, $path, $postdata = array(), $filedata = array()) {
$data = "";
$boundary = "---------------------".substr(md5(rand(0,32000)),0,10);
$fp = fsockopen($host, $port);

fputs($fp, "POST $path HTTP/1.0\n");
fputs($fp, "Host: $host\n");
fputs($fp, "Content-type: multipart/form-data; boundary=".$boundary."\n");

// Ab dieser Stelle sammeln wir erstmal alle Daten in einem String
// Sammeln der POST Daten
foreach($postdata as $key => $val){
	$data .= "--$boundary\n";
	$data .= "Content-Disposition: form-data; name=\"".$key."\"\n\n".$val."\n";
}

// Sammeln der FILE Daten
if($filedata) {
	$data .= "--$boundary\n";
	$data .= "Content-Disposition: form-data; name=\"".$filedata['name']."\"; filename=\"".$filedata['name']."\"\n";
	$data .= "Content-Type: ".$filedata['type']."\n";
	$data .= "Content-Transfer-Encoding: binary\n\n";
	$data .= $filedata['data']."\n";
	$data .= "--$boundary--\n";
}

// Senden aller Informationen
fputs($fp, "Content-length: ".strlen($data)."\n\n");
fputs($fp, $data);

// Auslesen der Antwort
while(!feof($fp)) {
	$res .= fread($fp, 1);
}
fclose($fp);

return $res;
}

$postdata = array('var1'=>'today', 'var2'=>'yesterday');
$filedata = array(
'type' => 'image/png',
'data' => file_get_contents('http://xxx/tdr-images/images/mapping/dynamic/deals/spot_map')
);

echo '<pre>'.postToHost ("localhost", 80, "/test3.php", $postdata, $filedata).'</pre>';


Link to comment
Share on other sites

//Initialize the Curl session
$ch = curl_init();

//Set curl to return the data instead of printing it to the browser.
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
//Set the URL
curl_setopt($ch, CURLOPT_URL, $URL);
//Execute the fetch
$data = curl_exec($ch);
//Close the connection
curl_close($ch);

Link to comment
Share on other sites

appreciate the code snippet, but at what point would I insert this into my current code.

 

I think cURL would probably work best, but not that good with implementing this method.

 

Have not found much, help either.

 

Some guidance will be greatly appreciated.

Link to comment
Share on other sites

    function file_get_contents_curl($url) {
    $ch = curl_init();
     
    curl_setopt($ch, CURLOPT_HEADER, 0);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1); //Set curl to return the data instead of printing it to the browser.
    curl_setopt($ch, CURLOPT_URL, $url);
     
    $data = curl_exec($ch);
    curl_close($ch);
     
    return $data;
    }

// to call the function

file_get_contents_curl('http://mywebsite.com');

 

 

Link to comment
Share on other sites

I am getting the files to download to the same directory the script live in, But I need to get them to download to the set $dir .

 

$dir = "/opt/tmp/media/current";

//03-2012. Location of merchant, if no image use place holder
    $thumbnails = implode(',', range(1, 10000));
    $ext = ".png";
    //for($thumbnails = 0; $thumbnails < 10000; $thumbnails++) { 
for($thumbnails = 1; $thumbnails < 500; $thumbnails++) {
        $url = $merch_map.'/'.$thumbnails.''.$ext;   
    
        echo $url . '<br>';
        //echo '<img src='.$url.' border=0/>'; 
       downloader($url, $dir);
    }



function downloader($url){
    $file_name = basename($url);
    $generated_files = geturl($url, $url);
    file_put_contents($file_name, $generated_files);
    $size=strlen($generated_files);
    if($size==0){exit(0);("<b>Error:</b>not found!");}
    //header('Content-type: application/force-download');
    //header('Content-Disposition: attachment; filename=' . $file_name);
    //header('Content-Transfer-Encoding: binary');
    //header('Expires: Sat, 26 Jul 1997 05:00:00 GMT');
    //header('Cache-Control: must-revalidate, post-check=0, pre-check=0');
    //header('Pragma: public');
    //header('Content-Length: ' . $size);
    readfile($file_name);
    unlink($file_name);
}

function geturl($url, $referer) {
    $headers[] = 'Accept: image/png';
    $headers[] = 'Connection: Keep-Alive';
    $headers[] = 'Content-type: application/x-www-form-urlencoded;charset=UTF-8';
    $user_agent = 'ONLY_MY_HOST';
    //$useragent = $url;
    $process = curl_init($url);
    curl_setopt($process, CURLOPT_HTTPHEADER, $headers);
    curl_setopt($process, CURLOPT_HEADER, 0);
    curl_setopt($process, CURLOPT_USERAGENT, $user_agent);
    curl_setopt($process, CURLOPT_REFERER, $referer);
    curl_setopt($process, CURLOPT_TIMEOUT, 30);
    curl_setopt($process, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($process, CURLOPT_FOLLOWLOCATION, 1);
    $return = curl_exec($process);
    curl_close($process);
    return $return;
}

 

What I'm just missing something, it downloads all images, but not to correct directory.....

Link to comment
Share on other sites

I changed the code up some, I can now get images to download, but still does not download to the $dir, that I have set.

 


function geturl($url, $referer) {
$headers[] = 'Accept: image/png';
$headers[] = 'Connection: Keep-Alive';
$headers[] = 'Content-type: application/x-www-form-urlencoded;charset=UTF-8';
$user_agent = 'ONLY_MY_HOST';
$process = curl_init($url);
curl_setopt($process, CURLOPT_HTTPHEADER, $headers);
curl_setopt($process, CURLOPT_HEADER, 0);
curl_setopt($process, CURLOPT_USERAGENT, $user_agent);
curl_setopt($process, CURLOPT_REFERER, $referer);
curl_setopt($process, CURLOPT_TIMEOUT, 30);
curl_setopt($process, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($process, CURLOPT_FOLLOWLOCATION, 1);
$return = curl_exec($process);
curl_close($process);
return $return;
}

function downloader($url) {
$file_name = basename($url);
$generated_files = geturl($url, $url);

file_put_contents($file_name, $generated_files);
$size = strlen($generated_files);
if ($size == 0) {
	echo '<b>Error:</b>not found!<br />';
}
}

echo 'images be saved to ' . getcwd() . '<br />';

$merch_map = "http://images/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/merchant_batch/merchant_maps";

$ext = ".png";
for($i = 1; $i < 1000; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
downloader($url, $dir);
echo "<img src=\"$image\" /><br />";
}

 

NO FTP NEED, just http image directory to set folder directory

Link to comment
Share on other sites

I want to know how to grab the $dir, to have it be the downloaded into folder.

 

I can get files download via the $url, I wrote in function to indentify it as the http cURL to use, then as variable to download from.

 

Its the download to. That I am not getting.....

Link to comment
Share on other sites

function grab_image($url,$saveto){
    $ch = curl_init ($url);
    curl_setopt($ch, CURLOPT_HEADER, 0);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($ch, CURLOPT_BINARYTRANSFER,1);
    $raw=curl_exec($ch);
    curl_close ($ch);
    if(file_exists($saveto)){
        unlink($saveto);
    }
    $fp = fopen($saveto,'x');
    fwrite($fp, $raw);
    fclose($fp);
}

 

in this function you can grab the image then save it to wherever using read,write,save functions.

 

call it like

 

grab_image('http;//mysite.com/image.jpeg','http://mysite.com/images/');

 

this will save the image in the images directory wherever the image is currently.

Link to comment
Share on other sites

darkfreaks,

I tried your piece of code, I get fwrite () expects paramater 1 to resource.

                                                  fopen failed to open stream.

 


function grab_image($url,$saveto){
$ch = curl_init ($url);
curl_setopt($ch, CURLOPT_HEADER, 0);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_BINARYTRANSFER,1);
$raw=curl_exec($ch);
curl_close ($ch);
if(file_exists($saveto)){
	//unlink($saveto);
}
$fp = fopen($saveto,'x');
fwrite($fp, $raw);
fclose($fp);
}

echo 'images be saved to ' . getcwd() . '<br />';

$merch_map = "http://tdr.aaa.com/tdr-images/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/merchant_batch/merchant_maps";
$saveto = $url;



$ext = ".png";
for($i = 1; $i < 100; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
grab_image($url, $dir);
//echo "<img src=\"$image\" /><br />";
}

Link to comment
Share on other sites

could try a different mode like r+ or w+ see if it makes any difference.

 

 

i bet that is why it is erroring because it is trying to create the image if it does not exist.

 

 


$saveto= fopen($string,'r+'); //read and write file
$saveto = fopen($string,'w+'); //read and write file

 

Link to comment
Share on other sites

 

change it to this per your statment.

Still getting error : falied to open stream with fopen

 


function grab_image($url,$saveto){
$ch = curl_init ($url);
curl_setopt($ch, CURLOPT_HEADER, 0);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_BINARYTRANSFER,1);
$raw=curl_exec($ch);
curl_close ($ch);
if(file_exists($saveto)){
	//unlink($saveto);
}
$fp = fopen($saveto,'x');
//fwrite($fp, $raw);
//fclose($fp);
$saveto= fopen($url,'r+'); //read and write file
$saveto = fopen($url,'w+'); //read and write file
}

echo 'images be saved to ' . getcwd() . '<br />';

$merch_map = "http://xxx/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/xxx";
$saveto = $url;



$ext = ".png";
for($i = 1; $i < 100; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
grab_image($url, $dir);
//echo "<img src=\"$image\" /><br />";
}

 

See my error, I really need to get this working, can you test it on your end and see if you get to download images from http direcotry to your desktop directory or a posted directory.

 

fopen(http://xxx/images/mapping/dynamic/aaamemberdeals/spot_map/1.png) [function.fopen]: failed to open stream: HTTP wrapper does not support writeable connections in

 

 

Link to comment
Share on other sites

Are you running this on a Linux server? If so, you need to set permissions on the folder you want to upload to so that the use running Apache/PHP can write to it.

 

It seems you're trying to scrape content from another site. I won't help further until you disclose the site you're scraping from. We need to know if they allow you to scrape their content in the ways you're attempting, otherwise we're assisting you in potentially illegal activity.

Link to comment
Share on other sites

Here is full code along with the site.

 

I am trying to grab image of maps for merchants to dump to our local server, then write our php script to read those maps, in-case their server dies or changes.

 



function grab_image($url,$saveto){
$ch = curl_init ($url);
curl_setopt($ch, CURLOPT_HEADER, 0);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_BINARYTRANSFER,1);
$raw=curl_exec($ch);
curl_close ($ch);
if(file_exists($saveto)){
	//unlink($saveto);
}
//$fp = fopen($saveto,'x');
$fp= fopen($saveto, 'x'); 
$fp= fopen($saveto, 'w+'); // read and write
$fp= fopen($saveto, 'r+'); //read and write
}

echo 'images be saved to ' . getcwd() . '<br />';

$merch_map = "http://tdr.aaa.com/tdr-images/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/merchant_batch/merchant_maps";
//$saveto = $dir;



$ext = ".png";
for($i = 1; $i < 100; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
grab_image($url, $dir);
//echo "<img src=\"$image\" /><br />";
}

 

it still produces this error:

Warning: fopen(C:/wamp/www/merchant_batch/merchant_maps) [function.fopen]: failed to open stream: Permission denied in C:\wamp\www\merchant_batch\download\merch_imgs.php on line 17

 

I am testing it on local setting first, then place into staging, but still errors on local.

 

 

Here is other code I have that works, for grabing the images(maps) but dumps them same directory as script, I wam $dir to be directory to dumps the files too.


function geturl($url, $dir) {
$headers[] = 'Accept: image/png';
$headers[] = 'Connection: Keep-Alive';
$headers[] = 'Content-type: application/x-www-form-urlencoded;charset=UTF-8';
$user_agent = 'ONLY_MY_HOST';
$process = curl_init($url);
curl_setopt($process,CURLOPT_URL,$url); 
curl_setopt($process, CURLOPT_HTTPHEADER, $headers);
curl_setopt($process, CURLOPT_HEADER, 0);
curl_setopt($process, CURLOPT_USERAGENT, $user_agent);
curl_setopt($process, CURLOPT_REFERER, $dir);
curl_setopt($process, CURLOPT_TIMEOUT, 30);
curl_setopt($process, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($process, CURLOPT_FOLLOWLOCATION, 1);
$return = curl_exec($process);
curl_close($process);
return $return;

}

function downloader($url) {
$file_name = basename($url);
$generated_files = geturl($url, $url);
file_put_contents($file_name, $generated_files);
$size = strlen($generated_files);
if ($size == 0) {
	echo '<b>Error:</b>not found!<br />';
}
}

echo 'images be saved to ' . getcwd() . '<br />';

$merch_map = "http://tdr.aaa.com/tdr-images/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/merchant_batch/merchant_maps";

$ext = ".png";
for($i = 1; $i < 100; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
downloader($url, $dir);
//echo "<img src=\"$image\" /><br />";
}


Link to comment
Share on other sites

In your first snippet, you should echo out $dir in your loop. It will show you why fopen is failing. You aren't actually passing a file name to the grab_image() function, you're passing a folder/directory. Also, nowhere do you use fwrite after you fopen. You can replace that with file_put_contents if you're using PHP5+

 

Though they claim the content is copy-written, I see no mention of scrapers in the ToS. This could be allowed, assuming you credit the source of the images and don't claim them as your own.

Link to comment
Share on other sites

 

This is my new setup code based on your instruction, GURU.

 

function grab_image($url, $saveto){
$ch = curl_init ($url);
curl_setopt($ch, CURLOPT_HEADER, 0);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_BINARYTRANSFER,1);
$raw=curl_exec($ch);
curl_close ($ch);
if(file_exists($saveto)){
	//unlink($saveto);
}
//$fp = fopen($saveto,'x');
file_put_contents($url, $saveto);
}

echo 'images be saved to ' . getcwd() . '<br />';

$merch_map = "http://tdr.aaa.com/tdr-images/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/merchant_batch/merchant_maps";
//$saveto = $dir;

//echo $dir;


$ext = ".png";
for($i = 1; $i < 100; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
grab_image($url, $dir);
//echo "<img src=\"$image\" /><br />";
}

ini_set('display_errors',1);
error_reporting(-1);

 

I get these errors, which makes no-sens If I am only reading then transferring form the http:

Warning: file_put_contents(http://tdr.aaa.com/tdr-images/images/mapping/dynamic/aaamemberdeals/spot_map/1.png) [function.file-put-contents]: failed to open stream: HTTP wrapper does not support writeable connections in C:\wamp\www\merchant_batch\download\merch_imgs.php on line 16

Link to comment
Share on other sites

You are simply copying and pasting code without first understanding what it does.

 

I provided a link to the manual entry for file_put_contents. You should read it, and try to understand arguments it expects, and what it does.

 

As it is, you're trying to write to the file defined in $url, which is not what you want to do.

 

It seems you're attempting to manipulate code you don't understand. Though I want you to get a working solution, my goal is for you to understand the code you're using. If you need it done for you, due to a deadline, feel free to post a request in the Freelance forum.

 

When you're ready to slow down and learn, we're here to help.

Link to comment
Share on other sites

Actually Guru, I am not.

 

I am combining what I currently have working 50% correct and implementing your insight, ,as best as I can.

 

My orginal code  worked as far as downloading the images, but it ignores my variable $dir, for location to place the file.

SEE my code here that works, you can try it for yourself.

 


function geturl($url, $dir) {
$headers[] = 'Accept: image/png';
$headers[] = 'Connection: Keep-Alive';
$headers[] = 'Content-type: application/x-www-form-urlencoded;charset=UTF-8';
$user_agent = 'ONLY_MY_HOST';
$process = curl_init($url);
curl_setopt($process,CURLOPT_URL,$url); 
curl_setopt($process, CURLOPT_HTTPHEADER, $headers);
curl_setopt($process, CURLOPT_HEADER, 0);
curl_setopt($process, CURLOPT_USERAGENT, $user_agent);
curl_setopt($process, CURLOPT_REFERER, $dir);
curl_setopt($process, CURLOPT_TIMEOUT, 30);
curl_setopt($process, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($process, CURLOPT_FOLLOWLOCATION, 1);
$return = curl_exec($process);
curl_close($process);
return $return;

}

function downloader($url) {
$file_name = basename($url);
$generated_files = geturl($url, $url);
file_put_contents($file_name, $generated_files);
$size = strlen($generated_files);
if ($size == 0) {
	echo '<b>Error:</b>not found!<br />';
}
}

echo 'images be saved to ' . getcwd() . '<br />';

$merch_map = "http://tdr.aaa.com/tdr-images/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/merchant_batch/merchant_maps";

$ext = ".png";
for($i = 1; $i < 100; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
downloader($url, $dir);
//echo "<img src=\"$image\" /><br />";
}

 

I got several feedback about using fopen, but that create a  stream call to open a protect directory and return error, directory is unable to http open stream.

 

So using my code listed above, how would I just get it to download, but download to the set $dir (directory) not the directory where the code exists.

 

Can you help guide me in that aspect.

 

I am intermediate when it comes to coding, n GURU as you are.

 

 

 

Link to comment
Share on other sites

Guru,

 

I am little

 

You sure file_put_contents( '/path/to/file.txt', 'data to be inserted' );

 

Has to have the actual path written in with file to and the actual path from.

 

I am testing will verify.

 

Thank you very much

Link to comment
Share on other sites

I really need help with this,, I do not understand where to make the file+put_contents read the directory to save to.

 

I changed my code around several times, I keep getting

 

Undefined variable: image

Undefined variable: dir

 

Division by zero.

 

Can you just please help me, please. I am having hardest time,, I am not that good with php and variable reading.

 

I place the variable per development UI, but then rendering code it errors out.

 

$merch_map = "http://tdr.aaa.com/tdr-images/images/mapping/dynamic/aaamemberdeals/spot_map"; 
$dir = "C:/wamp/www/merchant_batch/merchant_maps";

$ext = ".png";
for($i = 1; $i < 100; ++$i) {
$image = "{$i}{$ext}";
$url = "$merch_map/$image";   
echo "$url <br />";
downloader($url, $dir);
//echo "<img src=\"$image\" /><br />";
}

function geturl($url, $dir) {
$headers[] = 'Accept: image/png';
$headers[] = 'Connection: Keep-Alive';
$headers[] = 'Content-type: application/x-www-form-urlencoded;charset=UTF-8';
$user_agent = 'ONLY_MY_HOST';
$process = curl_init($url);
curl_setopt($process,CURLOPT_URL,$url); 
curl_setopt($process, CURLOPT_HTTPHEADER, $headers);
curl_setopt($process, CURLOPT_HEADER, 0);
curl_setopt($process, CURLOPT_USERAGENT, $user_agent);
curl_setopt($process, CURLOPT_REFERER, $dir);
curl_setopt($process, CURLOPT_TIMEOUT, 30);
curl_setopt($process, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($process, CURLOPT_FOLLOWLOCATION, 1);
$return = curl_exec($process);
curl_close($process);
return $return;

}



function downloader($url) {
$file_name = basename($url);
$generated_files = geturl($url, $url);
file_put_contents($dir/$image, $generated_files);
$size = strlen($generated_files);
if ($size == 0) {
	echo '<b>Error:</b>not found!<br />';
}
}


echo 'images be saved to ' . $filename . '<br />';


Link to comment
Share on other sites

Archived

This topic is now archived and is closed to further replies.

×
×
  • Create New...

Important Information

We have placed cookies on your device to help make this website better. You can adjust your cookie settings, otherwise we'll assume you're okay to continue.