- Downgraded `aws-sdk-s3` to version 1.170 and updated `aws-sdk-core` to 3.211 to maintain compatibility with existing code (pins sketched below).
- Updated various column types in the database schema from `integer` to `bigint` to handle larger values and improve data integrity (see the migration sketch below).
- Modified the `sync_geo_data.rake` task to use `find_or_create_by` for `Country`, so countries are kept unique by their ISO code.

These changes enhance dependency management and improve schema robustness while maintaining functionality.
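A minimal sketch of the corresponding `Gemfile` pins, assuming pessimistic version constraints are used:

```ruby
# Gemfile (sketch): keep aws-sdk-core in step with the downgraded aws-sdk-s3
gem "aws-sdk-core", "~> 3.211"
gem "aws-sdk-s3", "~> 1.170"
```

And a sketch of the kind of migration implied by the `integer` → `bigint` change; the table and column names (`cities.priority`, `states.level`) and the Rails migration version are illustrative assumptions, not the actual columns touched:

```ruby
# db/migrate/XXXXXXXXXXXXXX_widen_geo_columns_to_bigint.rb (sketch, hypothetical columns)
class WidenGeoColumnsToBigint < ActiveRecord::Migration[7.1]
  def up
    change_column :cities, :priority, :bigint
    change_column :states, :level, :bigint
  end

  def down
    change_column :cities, :priority, :integer
    change_column :states, :level, :integer
  end
end
```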
# lib/tasks/sync_geo_data.rake
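# Run with: bin/rails geo:sync (or: bundle exec rake geo:sync)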
require "json"

namespace :geo do
  desc "Sync geographical data from JSON"
  task sync: :environment do
    # Location of the JSON data files
    base_path = Rails.root.join("lib", "data")

    # Sync order matters: Regions -> Subregions -> Countries -> States -> Cities
    sync_regions(base_path)
    sync_subregions(base_path)
    sync_countries(base_path)
    sync_states(base_path)
    sync_cities(base_path)
  end

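  # Upserts Region records from regions.json, keyed by the id given in the file.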
  def sync_regions(base_path)
    file_path = base_path.join("regions.json")
    regions = JSON.parse(File.read(file_path))
    sum = regions.count
    count = 1

    regions.each do |data|
      region = Region.find_or_initialize_by(id: data["id"]) do |r|
        r.name = data["name"]
      end
      puts "Sync Regions[#{count}/#{sum}]: [#{region.name}]"
      count += 1

      region.update(
        translations: data["translations"],
        flag: data["flag"] || true,
        wiki_data_id: data["wikiDataId"]
      )
    end
  end

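  # Upserts Subregion records from subregions.json, linking each one to its parent Region via region_id.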
  def sync_subregions(base_path)
    file_path = base_path.join("subregions.json")
    subregions = JSON.parse(File.read(file_path))
    sum = subregions.count
    count = 1

    subregions.each do |data|
      subregion = Subregion.find_or_initialize_by(id: data["id"]) do |s|
        s.name = data["name"]
        s.region_id = data["region_id"]
      end
      puts "Sync Subregions[#{count}/#{sum}]: [#{subregion.name}]"
      count += 1

      subregion.update(
        translations: data["translations"],
        flag: data["flag"] || true,
        wiki_data_id: data["wikiDataId"]
      )
    end
  end

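  # Upserts Country records from countries.json. Countries are looked up (or created) by their
  # ISO2 code via find_or_create_by, so the ISO code is the effective unique key; Region and
  # Subregion associations are resolved by id when present, falling back to a name lookup.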
  def sync_countries(base_path)
    file_path = base_path.join("countries.json")
    countries = JSON.parse(File.read(file_path))
    sum = countries.count
    count = 1

    countries.each do |data|
      # puts "Syncing countries for #{data["name"]}"
      # Resolve the Region: prefer the explicit id, fall back to the name
      region = if data["region_id"]
                 Region.find_by(id: data["region_id"])
               elsif data["region"]
                 Region.find_by(name: data["region"])
               end

      # Resolve the Subregion the same way
      subregion = nil
      if data["subregion_id"].present?
        subregion = Subregion.find_by(id: data["subregion_id"])
      elsif data["subregion"].present?
        subregion = Subregion.find_by(name: data["subregion"])
      end

      puts "Syncing Country[#{count}/#{sum}] [#{data["name"]}] region: [#{region&.name}] subregion: [#{data["subregion"]}]"
      count += 1

      # Find or create the Country, keyed by its ISO2 code
      country = Country.find_or_create_by(code: data["iso2"]) do |c|
        c.name = data["name"]
      end

      # Update the remaining Country attributes
      country.update(
        iso3: data["iso3"],
        numeric_code: data["numeric_code"],
        iso2: data["iso2"],
        code: data["iso2"],
        phonecode: data["phonecode"],
        capital: data["capital"],
        currency: data["currency"],
        currency_name: data["currency_name"],
        currency_symbol: data["currency_symbol"],
        tld: data["tld"],
        native: data["native"],
        nationality: data["nationality"],
        timezones: data["timezones"],
        translations: data["translations"],
        latitude: data["latitude"],
        longitude: data["longitude"],
        emoji: data["emoji"],
        emoji_u: data["emojiU"],
        flag: data["flag"] || true,
        wiki_data_id: data["wikiDataId"]
      )
      country.update(region: region) if region
      country.update(subregion: subregion) if subregion
    end
  end

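  # Upserts State records from states.json, matched by name, and fills in ISO/FIPS codes,
  # coordinates and hierarchy fields (parent_id, level).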
  def sync_states(base_path)
    file_path = base_path.join("states.json")
    states = JSON.parse(File.read(file_path))
    sum = states.count
    count = 1

    states.each do |data|
      puts "Syncing State[#{count}/#{sum}] [#{data["name"]}]"
      count += 1

      state = State.find_or_initialize_by(name: data["name"]) do |s|
        s.country_id = data["country_id"]
      end

      state.update(
        country_code: data["country_code"],
        fips_code: data["fips_code"],
        iso2: data["iso2"],
        code: data["state_code"],
        state_type: data["type"],
        level: data["level"],
        parent_id: data["parent_id"],
        latitude: data["latitude"],
        longitude: data["longitude"],
        flag: data["flag"] || true,
        wiki_data_id: data["wikiDataId"]
      )
    end
  end

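  # Upserts City records from cities.json, matched by name. Country and State are resolved by
  # name and only assigned when found; active defaults to false and priority to 10 when unset.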
  def sync_cities(base_path)
    file_path = base_path.join("cities.json")
    cities = JSON.parse(File.read(file_path))
    sum = cities.count
    count = 1

    cities.each do |data|
      city = City.find_or_initialize_by(name: data["name"])

      country = Country.find_by(name: data["country_name"])
      state = State.find_by(name: data["state_name"])

      puts "Syncing City[#{count}/#{sum}] [#{data["name"]}] Country:[#{country&.name}]"
      count += 1

      city.update(
        latitude: data["latitude"],
        longitude: data["longitude"],
        flag: data["flag"] || true,
        wiki_data_id: data["wikiDataId"]
      )

      if country
        city.update(
          country: country,
          country_code: country.code
        )
      end

      if state
        city.update(state: state, state_code: state.code)
      end

      city.update(active: false) if city.active.nil?
      city.update(priority: 10) if city.priority.nil?
    end
  end
end