2
3

現在発表中の気象警報・注意報を取得 (Fetch the weather warnings/advisories currently in effect)

Posted at
import json
import re

import pandas as pd
import requests
from bs4 import BeautifulSoup

# Municipal area code (JMA "class20s" level) used to select the target city
# in area.json; prefix 38 suggests Ehime Prefecture — TODO confirm the city.
AREA_CODE = "3820200"


def fetch_soup(url, parser="html.parser"):
    """Download *url* and return the parsed document as a BeautifulSoup tree.

    Raises requests.HTTPError on a non-2xx response.
    """
    response = requests.get(url)
    response.raise_for_status()
    return BeautifulSoup(response.content, parser)


def fetch_json(url):
    """GET *url* and return the response body decoded as JSON.

    Raises requests.HTTPError on a non-2xx response.
    """
    response = requests.get(url)
    response.raise_for_status()
    return response.json()


def fetch_area(url):
    """Fetch JMA's area.json and flatten its four levels into one table.

    Joins city (class20s) -> district (class15s) -> region (class10s) ->
    prefecture (offices) via each level's ``parent`` code, prefixing the
    columns of each level so they stay distinguishable after the merge.

    Returns a DataFrame indexed by city code.
    """
    area = fetch_json(url)

    def _level(key, prefix, drop_children=True):
        # Each level is a code-keyed mapping; transpose so codes become the index.
        frame = pd.DataFrame(area[key]).T
        if drop_children:
            frame = frame.drop("children", axis=1)
        return frame.add_prefix(prefix)

    cities = _level("class20s", "city_", drop_children=False)
    districts = _level("class15s", "district_")
    regions = _level("class10s", "region_")
    prefs = _level("offices", "pref_")

    joined = cities.merge(districts, left_on="city_parent", right_index=True)
    joined = joined.merge(regions, left_on="district_parent", right_index=True)
    return joined.merge(prefs, left_on="region_parent", right_index=True)


def extract_js_object(js_content):
    """Extract the JS object literal assigned to ``f`` from *js_content*.

    The page embeds the warning-name table as ``f = {...}, A = ...`` inside
    a script tag. The literal is normalised into JSON (single quotes to
    double quotes, bare keys quoted) and parsed.

    Returns the parsed dict, or None when the object is missing or the
    normalised text is not valid JSON.
    """
    found = re.search(r"f\s*=\s*({[\s\S]*?})\s*,\s*A\s*=", js_content, re.DOTALL)
    if found is None:
        print("オブジェクトが見つかりませんでした。")
        return None

    # Single quotes -> double quotes, then quote the bare identifier keys
    # so the JS literal becomes strict JSON.
    normalized = found.group(1).replace("'", '"')
    normalized = re.sub(r"(\w+):", r'"\1":', normalized)

    try:
        return json.loads(normalized)
    except json.JSONDecodeError as err:
        print(f"JSONデコードエラー: {err}")
        return None


def fetch_alert(url):
    """Scrape the JMA warning page and return the warning-code name table.

    Parameters
    ----------
    url : str
        JMA warning page URL (any ``#fragment`` is client-side only and
        ignored by the server).

    Returns
    -------
    pd.DataFrame
        Indexed by warning code, with a single ``name`` column built by
        concatenating the page's ``name1`` and ``name2`` parts.

    Raises
    ------
    ValueError
        If the page layout changed and the embedded JS object can no
        longer be located or decoded.
    """
    soup = fetch_soup(url)

    # NOTE(review): the warning-name table lives in the 11th <script> tag;
    # this positional index is fragile if JMA changes the page layout, so
    # fail with a clear message instead of an opaque IndexError.
    scripts = soup.find_all("script")
    if len(scripts) <= 10:
        raise ValueError("warning page layout changed: expected <script> tag not found")

    js_content = scripts[10].encode("ascii").decode("unicode-escape")

    data = extract_js_object(js_content)
    if data is None:
        # Without this guard pd.DataFrame(None) builds an empty frame and the
        # "name1" lookup below raises a confusing KeyError.
        raise ValueError("could not extract the warning-name object from the page")

    df_alert = pd.DataFrame(data).T
    df_alert["name"] = df_alert["name1"] + df_alert["name2"]

    return df_alert.drop(["name1", "name2"], axis=1)


if __name__ == "__main__":
    # Build the area lookup table (city / district / region / prefecture).
    area_url = "https://www.jma.go.jp/bosai/common/const/area.json"
    df_area = fetch_area(area_url)

    # Build the warning-code -> warning-name table scraped from the page.
    alert_url = f"https://www.jma.go.jp/bosai/warning/#area_type=class20s&area_code={AREA_CODE}&lang=ja"
    df_alert = fetch_alert(alert_url)

    # This city's row; its region_parent code selects the warning JSON file.
    CITY_DATA = df_area.loc[AREA_CODE]
    region_url = f'https://www.jma.go.jp/bosai/warning/data/warning/{CITY_DATA["region_parent"]}.json'

    # Fetch the warnings currently issued for the region.
    warning_json = fetch_json(region_url)
    data = warning_json["areaTypes"][1]["areas"]

    # One row per (area, warning) pair; keep the area "code" on every row.
    df_warning = pd.json_normalize(
        data,
        record_path="warnings",
        meta=["code"],
        record_prefix="warning_",
        errors="ignore",
    )

    # Attach warning names, then area names.
    merge_df = pd.merge(df_warning, df_alert, left_on="warning_code", right_index=True)
    df_status = pd.merge(merge_df, df_area, left_on="code", right_index=True)

    # Keep only the target city's rows.
    df = df_status.loc[df_status["code"] == AREA_CODE].copy()

    # Report timestamp, stripped of timezone for display.
    ts_date = pd.to_datetime(warning_json["reportDatetime"], errors="coerce").tz_localize(None)

    # Print the currently issued warnings/advisories.
    print(ts_date.strftime("%Y-%m-%d %X"))
    print(f'\n{CITY_DATA["pref_name"]}{CITY_DATA["city_name"]}の気象警報・注意報\n')

    df["warning_attentions"] = df["warning_attentions"].fillna("")

    if df.empty:
        print("現在発表警報・注意報はありません。")
    else:
        for _, row in df.iterrows():
            # "".join handles both the list case and the fillna("") case.
            attentions = "".join(row["warning_attentions"])
            print(f"{row['name']} {attentions}".strip())

参考

2
3
0

Register as a new user and use Qiita more conveniently

  1. You get articles that match your needs
  2. You can efficiently read back useful information
  3. You can use dark theme
What you can do with signing up
2
3