Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
72 changes: 72 additions & 0 deletions .github/workflows/docker-publish.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
name: Docker

on:
  push:
    branches: ["master"]
    tags: ['v*.*.*', 'latest']

env:
  # Use ghcr.io for GitHub Container Registry
  REGISTRY: ghcr.io
  # GitHub repository as <account>/<repo>
  IMAGE_NAME: ${{ github.repository }}


jobs:
  build:

    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      # id-token is required for cosign keyless (OIDC) signing
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Install cosign for signing images
      - name: Install cosign
        uses: sigstore/cosign-installer@v3.5.0
        with:
          cosign-release: 'v2.2.4'

      # Set up Docker Buildx for multi-platform builds
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.0.0

      # Log in to GitHub Container Registry
      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v3.0.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Extract metadata for Docker image.
      # type=semver only matches v*.*.* tag pushes; the raw "latest" tag
      # (enabled on the default branch only) ensures plain pushes to
      # master still produce a pushable tag instead of an empty tag list.
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v5.0.0
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=semver,pattern={{version}}
            type=raw,value=latest,enable={{is_default_branch}}

      # Build and push Docker image for all architectures.
      # push must be set explicitly: build-push-action defaults to
      # push=false, which would build the image without publishing it,
      # and the signing step below would then reference a digest that
      # does not exist in the registry.
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v5.0.0
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      # Sign the resulting Docker image digest.
      # TAGS is newline-separated when multiple tags are produced;
      # xargs signs each tag@digest pair individually.
      - name: Sign the published Docker image
        env:
          TAGS: ${{ steps.meta.outputs.tags }}
          DIGEST: ${{ steps.build-and-push.outputs.digest }}
        run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
*.gz
*.csv
*.json
*.parquet
*.mmdb
12 changes: 7 additions & 5 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,19 @@ RUN apk add --no-cache curl

WORKDIR /data

# set environment variable.
# set environment variable
ENV UPDATE_FREQUENCY=0
ENV IPINFO_TOKEN='98266fdad56289'
ENV IPINFO_DATABASES='country_asn'
ENV IPINFO_TOKEN='my_ipinfo_token'
ENV IPINFO_DATABASES='country_asn.mmdb'
ENV DEFAULT_DB_FORMAT='mmdb'
ENV AUTO_EXTRACT_GZ='false'

# copy the script.
# copy the script
COPY ipinfo.sh /usr/local/bin/ipinfo.sh
RUN chmod +x /usr/local/bin/ipinfo.sh

# create the volume.
VOLUME /data

# run the script.
# run the script
CMD ["/usr/local/bin/ipinfo.sh"]
15 changes: 10 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# [<img src="https://ipinfo.io/static/ipinfo-small.svg" alt="IPinfo" width="24"/>](https://ipinfo.io/) IPinfo Docker Image

`ipinfo-db` is a docker image by [IPinfo.io](https://ipinfo.io) that downloads free country asn database in mmdb format.
`ipinfo-db` is a docker image by [IPinfo.io](https://ipinfo.io) that downloads IPInfo databases (for info on what DBs are available, see [here](https://ipinfo.io/developers/database-filename-reference)).

## Pull image
```bash
Expand All @@ -9,17 +9,22 @@ docker pull ipinfo/ipinfo-db:latest

## Configuration

- `IPINFO_TOKEN` (optional) - Set you ipinfo token.
- `IPINFO_DATABASES` (optional) - Databases to download, default to `country_asn`.
- `UPDATE_FREQUENCY` (optional) - Interval of updating database in bash sleep format. If this is not set or is set to 0 (default), image will run once and exit.
- `IPINFO_TOKEN` (required) - Set your IPinfo token, available in your [dashboard](https://ipinfo.io/dashboard/token).
- `IPINFO_DATABASES` (optional) - Space-separated list of databases to download. **Notes**:
**(1)** The default value is set to `country_asn.mmdb` to ensure backwards compatibility with the previous version of the image, but we recommend using the newer `ipinfo_lite` database instead. The data provided by `ipinfo_lite` is the same as `country_asn`, but the schema has changed. See [here](https://github.com/ipinfo/docker/issues/9#issuecomment-2868624800) for more details.
- `UPDATE_FREQUENCY` (optional) - Interval of updating database in bash sleep format. If this is not set or is set to `0` (default), image will run once and exit.
- `DEFAULT_DB_FORMAT` (optional) - Default database format. Can be `mmdb`, `csv`, `json` or `parquet`. Defaults to `mmdb`.
- `AUTO_EXTRACT_GZ` (optional) - If set to `true` or `1`, the downloaded files will be extracted from gzipped format. Defaults to `false`. **Notes**:
**(1)** This increases the storage requirements of downloaded files, as both the `.gz` file and the extracted file will be stored in the same directory - this is to check the hash of the file on disk against the hash of the file on IPinfo's servers (and prevent re-downloading the same file).
**(2)** This variable is only relevant for `.csv` and `.json` files, as the `.mmdb` and `.parquet` files are not gzipped on IPinfo's servers.

## Usage:

```bash
docker run -v <dir>:/data \
-e IPINFO_TOKEN=<ipinfo_token> \
-e UPDATE_FREQUENCY=<update_frequency> \
ipinfo-db
ipinfo/ipinfo-db
```

`<dir>` local directory that you want to download the databases to.
Expand Down
100 changes: 89 additions & 11 deletions ipinfo.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,30 +1,108 @@
#!/bin/sh

get_root_url() {
  # Decide which IPinfo endpoint serves the given database file.
  # The legacy free databases (country, asn, country_asn) live under
  # /data/free/; every other database is served from /data/.
  # Strip everything from the first dot to get the bare database name.
  BASE_NAME="${1%%.*}"

  if [ "$BASE_NAME" = "country_asn" ] || [ "$BASE_NAME" = "country" ] || [ "$BASE_NAME" = "asn" ]; then
    echo "https://ipinfo.io/data/free/"
  else
    echo "https://ipinfo.io/data/"
  fi
}

get_file_extension() {
  # Map a DEFAULT_DB_FORMAT value to the file suffix used on IPinfo's
  # servers: csv/json downloads are gzipped, mmdb/parquet are not.
  # Unknown formats yield no output (the caller validates the format
  # before this function is reached).
  if [ "$1" = "csv" ] || [ "$1" = "json" ]; then
    echo ".$1.gz"
  elif [ "$1" = "mmdb" ] || [ "$1" = "parquet" ]; then
    echo ".$1"
  fi
}

while true; do
# Check if DEFAULT_DB_FORMAT is set and valid
if ! [[ "$DEFAULT_DB_FORMAT" =~ ^(mmdb|csv|json|parquet)$ ]]; then
echo "Error: DEFAULT_DB_FORMAT is either not set, or is not allowed. Please set it to either 'mmdb', 'csv', 'json', or 'parquet'. Value received: '$DEFAULT_DB_FORMAT'"
break
fi

# Iterate over the databases
for DATABASE in ${IPINFO_DATABASES}; do
if [ -f ${DATABASE}.mmdb ]; then
LOCAL=$(sha256sum ${DATABASE}.mmdb | awk '{print $1}')
REMOTE=$(curl --silent https://ipinfo.io/data/free/${DATABASE}.mmdb/checksums?token=${IPINFO_TOKEN} \
# Check if DATABASE already has a file extension
if [[ "$DATABASE" != *.* ]]; then
# Append the correct file extension based on DEFAULT_DB_FORMAT
FILE_EXTENSION=$(get_file_extension "$DEFAULT_DB_FORMAT")
DATABASE="${DATABASE}${FILE_EXTENSION}"
fi

# Retrieve the correct root URL based on the database name
BASE_URL=$(get_root_url "$DATABASE")
DB_URL="${BASE_URL}${DATABASE}"

if [ -f "${DATABASE}" ]; then
LOCAL=$(sha256sum "${DATABASE}" | awk '{print $1}')
REMOTE=$(curl --silent ${DB_URL}/checksums?token=${IPINFO_TOKEN} \
| sed -n 's/.*"sha256": *"\([a-f0-9]*\)".*/\1/p')
# Check if the local and remote checksums are the same
# If they are, skip the download
if [ "$LOCAL" = "$REMOTE" ]; then
echo "${DATABASE}.mmdb is up-to-date."
echo "${DATABASE} is up to date."
continue
fi
fi

# Download the database
RESPONSE=$(curl \
-s -w '%{http_code}' -L -o "${DATABASE}.mmdb.new" \
"https://ipinfo.io/data/free/${DATABASE}.mmdb?token=${IPINFO_TOKEN}")
-s -w '%{http_code}' -L -o "${DATABASE}.new" \
"${DB_URL}?token=${IPINFO_TOKEN}")
if [ "$RESPONSE" != "200" ]; then
echo "$RESPONSE Failed to download ${DATABASE}.mmdb database."
rm "${DATABASE}.mmdb.new" 2> /dev/null
# Check if response code is 429
if [ "$RESPONSE" = "429" ]; then
echo "Rate limit exceeded. Please try again later."
break
else
echo "$RESPONSE Failed to download ${DATABASE} database from '${DB_URL}'."
break
fi
rm "${DATABASE}.new" 2> /dev/null
else
echo "${DATABASE}.mmdb database downloaded in /data volume."
mv "${DATABASE}.mmdb.new" "${DATABASE}.mmdb"
echo "${DATABASE} database downloaded in /data volume."
mv "${DATABASE}.new" "${DATABASE}"

# Check if automated extraction of GZ files is enabled
if [ "$AUTO_EXTRACT_GZ" = "1" ] || [ "$AUTO_EXTRACT_GZ" = "true" ]; then
# Check if the file is a GZ file
if [[ "${DATABASE}" == *.gz ]]; then
# Extract the GZIP file (while keeping the original)
gunzip -k "${DATABASE}"

# Check if the extraction was successful
if [ $? -eq 0 ]; then
echo "Extracted ${DATABASE} to ${DATABASE%.gz}"
else
echo "Failed to extract ${DATABASE}"
fi
fi
fi
fi
done

if [ $UPDATE_FREQUENCY == 0 ]; then
if [ "$UPDATE_FREQUENCY" = "0" ]; then
break
else
echo "Sleeping for $UPDATE_FREQUENCY seconds before the next update."
fi

sleep "$UPDATE_FREQUENCY"
Expand Down