The FTP service (ftp://gnss-data.kadaster.nl) will be permanently shut down on October 1, 2025. Please migrate your scripts and processes to use HTTPS before this date.
Please switch to downloading and mirroring data from https://gnss-data.kadaster.nl
The following examples show how to download GNSS data files using various tools. These examples use real paths from the NSGI GNSS Datacenter.
curl
# Download GNSS observation file with original filename (EIJS station, day 232, 2025)
curl -O https://gnss-data.kadaster.nl/data/daily/2025/232/EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz

# Download with an alternative name
curl -o eijs2320.25d.gz https://gnss-data.kadaster.nl/data/daily/2025/232/EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz
# Download multiple station files using curl's brace expansion
curl -O "https://gnss-data.kadaster.nl/data/daily/2025/232/{AMEL,EIJS,VLIS}00NLD_R_20252320000_01D_30S_MO.crx.gz"

# Download files from multiple stations individually
curl -O https://gnss-data.kadaster.nl/data/daily/2025/232/AMEL00NLD_R_20252320000_01D_30S_MO.crx.gz \
     -O https://gnss-data.kadaster.nl/data/daily/2025/232/EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz \
     -O https://gnss-data.kadaster.nl/data/daily/2025/232/VLIS00NLD_R_20252320000_01D_30S_MO.crx.gz
# This downloads UTR2 station data for hours 00-13, at 15-minute intervals (00, 15, 30, 45 minutes)
# [00-13] is a curl numeric range glob; {00,15,30,45} is a brace list glob.
curl -O "https://gnss-data.kadaster.nl/data/highrate/2025/193/UTR200NLD_R_2025193[00-13]{00,15,30,45}_15M_01S_MO.crx.gz"
lftp
# Mirror entire directory of daily data for day 232, 2025
lftp -c "open https://gnss-data.kadaster.nl; mirror /data/daily/2025/232 /local/gnss/2025/232"

# Mirror specific stations from entire year with file filtering
lftp -c "open https://gnss-data.kadaster.nl; mirror -p -v -I EIJS*.crx.gz -I SDYK*.crx.gz /data/daily/2025 /local/gnss/2025"

# Mirror with parallel downloads and retry settings
lftp -c "open https://gnss-data.kadaster.nl; set net:max-retries 2; set cmd:parallel 4; mirror /data/daily/2025/232 /local/gnss/2025/232"
# Download all compressed RINEX files from a specific day
lftp -c "open https://gnss-data.kadaster.nl; mget /data/daily/2025/232/*.crx.gz"

# Download files from specific stations only
lftp -c "open https://gnss-data.kadaster.nl/data/daily/2025/232; mget EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz AMEL00NLD_R_20252320000_01D_30S_MO.crx.gz"
# Download a single GNSS observation file over HTTPS and save it to disk.
import requests

url = "https://gnss-data.kadaster.nl/data/daily/2025/232/EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz"

response = requests.get(url)
if response.status_code == 200:
    # Binary mode: the .crx.gz payload must be written byte-for-byte.
    with open("EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz", "wb") as f:
        f.write(response.content)
    print("File downloaded successfully")
else:
    print(f"Download failed: {response.status_code}")
# Download daily observation files for several stations for a given calendar date.
import datetime
import requests

date = datetime.date(2025, 4, 19)
# %Y = four-digit year, %j = zero-padded day-of-year; the archive is laid out as /data/daily/<year>/<doy>/
Y, j = date.strftime("%Y %j").split()
base_url = f"https://gnss-data.kadaster.nl/data/daily/{Y}/{j}/"
stations = ["AMEL00NLD", "EIJS00NLD", "SABY00BES"]

for station in stations:
    # RINEX 3 long filename: <station>_R_<year><doy>0000_01D_30S_MO.crx.gz
    filename = f"{station}_R_{Y}{j}0000_01D_30S_MO.crx.gz"
    url = base_url + filename
    response = requests.get(url)
    if response.status_code == 200:
        with open(filename, "wb") as f:
            f.write(response.content)
        print(f"Downloaded: {filename}")
    else:
        print(f"Failed to download: {filename}")
use strict;
use warnings;
use LWP::Simple;

# Download a single GNSS observation file over HTTPS and save it to disk.
my $url      = "https://gnss-data.kadaster.nl/data/daily/2025/232/EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz";
my $filename = "EIJS00NLD_R_20252320000_01D_30S_MO.crx.gz";

# is_success() (exported by LWP::Simple) accepts any 2xx status, not only 200.
if ( is_success( getstore( $url, $filename ) ) ) {
    print "File downloaded successfully\n";
}
else {
    print "Download failed\n";
}
use strict;
use warnings;
use LWP::UserAgent;

# Download daily observation files for several stations (day 232, 2025).
my $ua       = LWP::UserAgent->new();
my $base_url = "https://gnss-data.kadaster.nl/data/daily/2025/232/";
my @stations = ( "APEL00NLD", "BONK00BES", "ZWO200NLD" );

foreach my $station (@stations) {
    my $filename = "${station}_R_20252320000_01D_30S_MO.crx.gz";
    my $url      = $base_url . $filename;

    # ':content_file' streams the response body straight to disk
    # instead of buffering it in memory.
    my $response = $ua->get( $url, ':content_file' => $filename );
    if ( $response->is_success ) {
        print "Downloaded: $filename\n";
    }
    else {
        print "Failed to download: $filename\n";
    }
}
Tips: use `curl --compressed` or similar options for faster downloads; `lftp` with `cmd:parallel` can speed up batch downloads.