Compare commits

...

51 Commits

Author SHA1 Message Date
Lin f67d3e9130 update
2 days ago
Lin 315c9246d2 update doc
2 days ago
Lin 41becde0aa update UML
2 days ago
Lin ca15429378 Update readme
2 days ago
Lin 71f8221b0f delete clawer readme
2 days ago
Lin 40524b3fec self report
2 days ago
Lin 21cf165ac6 sql
2 days ago
Lin ad3796b17e 07 team report
2 days ago
Lin c5c7925352 Collect into folders
2 days ago
Lin bd2d2f8b38 collect into folders
2 days ago
Lin 27fa2329f9 update docs
2 days ago
Lin 11af1fff19 update code
2 days ago
Lin 87594b987b update src
2 days ago
Lin edb6f27f88 Merge branch 'main' into dev
2 days ago
Lin a29f33b493 Merge branch 'dev-clawer' into dev
2 days ago
Lin f1052b180b collect into folder
2 days ago
Lin c648389959 final version of clawer
2 days ago
Lin 8188fb18cb collect src into folder
2 days ago
RichardWang 902661b7aa Docs and standardized naming
4 days ago
RichardWang f909563daa Upload demo run video
4 days ago
RichardWang 91beb1a23b Upload UML model and poster
3 weeks ago
Lin 09bf09c0ac Multi dialogue & formate the response
1 month ago
Lin 1c9ec9b167 csv convert to xlsx
1 month ago
RichardWang b77b3ebca3 Fix query bug
1 month ago
RichardWang 668c076cd0 Adjust average delay time and value units
1 month ago
RichardWang 3b39df1cb1 other
1 month ago
RichardWang 0647a38283 Add IATA three-letter airport code fields
1 month ago
RichardWang b882879d6b Modify website-redirect field
1 month ago
Lin 3eecc9ed60 tianjin--guiyang flight data
1 month ago
Lin 9e6d9eafc6 flight data
1 month ago
Lin 1c8a00fbbb merge without operateFlightNo
1 month ago
Lin d4e76dba28 Batch import flight data
1 month ago
Lin 9ce47b9e8a others
1 month ago
Lin 663de7af66 import data
1 month ago
Lin 3f9776c4b1 merge all comfort info
1 month ago
RichardWang 2d268a2fe4 update
1 month ago
RichardWang 2dfbc22e14 Modify user info fields
1 month ago
RichardWang 888f21867e Backend code for city selection
1 month ago
RichardWang 5494f582aa Add date picker and city selector to the frontend page
1 month ago
RichardWang e10f4ed351 Adjust styles
1 month ago
RichardWang 3281e92d7f Add fields
1 month ago
RichardWang 66bf8c062a Implement page navigation
1 month ago
Lin 37a072fadc capture flight comfort info 0.1
2 months ago
Lin 280e11d2ce init
2 months ago
Lin e7e9cd94da update .class
2 months ago
Lin 26612cb0f4 Date index
2 months ago
Lin 4166439970 jump to index
2 months ago
Lin fc1467bf5f add Date index
2 months ago
Lin 06c2fd3d81 other code
2 months ago
Lin 22050ebd56 Basic function and AI assistant
2 months ago
Lin 0075eb4ed9 Calling API
2 months ago

@ -1,2 +0,0 @@
# 软工课设123

[Binary files added or replaced (mostly images, 22 KiB to 7.8 MiB); previews not shown.]

@ -0,0 +1,6 @@
projectKey=clawer
serverUrl=http://localhost:9000
serverVersion=7.8.0.26217
dashboardUrl=http://localhost:9000/dashboard?id=clawer
ceTaskId=AZMv5JVBnAUFl5pPDUTm
ceTaskUrl=http://localhost:9000/api/ce/task?id=AZMv5JVBnAUFl5pPDUTm
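
The ceTaskUrl recorded above points at SonarQube's compute-engine task for this analysis and can be polled until processing finishes. A minimal sketch, assuming the local server from this file is reachable and a user token has been generated (the token value here is a hypothetical placeholder):

```python
import time
import requests

# Values taken from report-task.txt above
CE_TASK_URL = "http://localhost:9000/api/ce/task?id=AZMv5JVBnAUFl5pPDUTm"
TOKEN = "YOUR_SONARQUBE_TOKEN"  # hypothetical placeholder; create one under My Account > Security

def wait_for_analysis(timeout_s=300, poll_s=5):
    """Poll the compute-engine task until it leaves the queue or the timeout expires."""
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        resp = requests.get(CE_TASK_URL, auth=(TOKEN, ""))
        resp.raise_for_status()
        status = resp.json()["task"]["status"]  # PENDING / IN_PROGRESS / SUCCESS / FAILED / CANCELED
        if status not in ("PENDING", "IN_PROGRESS"):
            return status
        time.sleep(poll_s)
    raise TimeoutError("SonarQube analysis did not finish in time")

if __name__ == "__main__":
    print(wait_for_analysis())
```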

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2024 Suysker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,157 @@
import os
import re
import subprocess
# Global variables for proxy switch count
proxy_switch_count = 0
iface_ipv6_dict = {}
def is_root():
return os.geteuid() == 0
def interface_usable(interface_name, skip_check=False, ipv6_address='2400:3200::1', max_retries=3):
if skip_check:
return True
current_try = 0
while current_try < max_retries:
try:
cmd_result = subprocess.run(["ping", "-c", "1", "-I", interface_name, ipv6_address], stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=5)
if cmd_result.returncode == 0:
                return True # ping succeeded, the interface is usable
except subprocess.TimeoutExpired:
print(f"Ping attempt {current_try + 1} of {max_retries} timed out. Retrying...")
except subprocess.SubprocessError as e:
            # catch other subprocess-related exceptions
print(f"An error occurred while trying to ping: {e}. Retrying...")
current_try += 1
    return False # all retries failed
def get_existing_interfaces(base_interface='eth0'):
cmd_result = subprocess.run(["ip", "addr", "show"], stdout=subprocess.PIPE)
output = cmd_result.stdout.decode()
    # match macvlan interface names derived from the base interface
    iface_pattern = re.compile(re.escape(base_interface) + r'_([0-9]+)@')
    iface_matches = iface_pattern.findall(output)
    # build the full list of interface names
    interfaces = [f"{base_interface}_{match}" for match in iface_matches]
    # map each interface name to its IPv6 address
iface_ipv6_dict = {}
for iface in interfaces:
        # look up the IPv6 address of each interface (only the first one is kept)
        # query the specific interface so that only its addresses are matched
        cmd_result = subprocess.run(["ip", "addr", "show", iface], stdout=subprocess.PIPE)
        output = cmd_result.stdout.decode()
        ipv6_pattern = re.compile(r"inet6\s+([0-9a-f:]+)\/\d+")
        ipv6_matches = ipv6_pattern.findall(output)
        # drop link-local addresses (those starting with "fe80")
        ipv6_addresses = [addr for addr in ipv6_matches if not addr.startswith("fe80")]
        # if any non-link-local address exists, keep only the first one
if ipv6_addresses:
iface_ipv6_dict[iface] = ipv6_addresses[0]
return iface_ipv6_dict
def execute_ip6tables_command(command):
sudo_cmd = ["sudo"] if not is_root() else []
cmd = sudo_cmd + command.split()
subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def switch_proxy_server(mode='normal'):
global proxy_switch_count
global iface_ipv6_dict
if mode == 'normal':
if iface_ipv6_dict:
proxy_switch_count += 1
proxy_index = proxy_switch_count % len(iface_ipv6_dict)
selected_interface = list(iface_ipv6_dict.keys())[proxy_index]
ipv6_address = iface_ipv6_dict[selected_interface]
            # flush the custom chain
            execute_ip6tables_command('ip6tables -t nat -F FAKE_IPV6_CHAIN')
            # add the SNAT rule for the selected source address
            execute_ip6tables_command(f'ip6tables -t nat -A FAKE_IPV6_CHAIN -j SNAT --to-source {ipv6_address}')
print(f"Using interface: {selected_interface}, Connecting to: {ipv6_address}")
def create_ipv6_addresses(n, base_interface='eth0', delete_interface=True):
sudo_cmd = ["sudo"] if not is_root() else []
if delete_interface:
delete_ipv6_addresses(base_interface)
existing_interfaces = list(get_existing_interfaces(base_interface).keys())
interfaces = []
for i in range(1, n + 1):
interface_name = f"{base_interface}_{i}"
# Check if the interface exists, if yes, delete it first
if interface_name in existing_interfaces:
if interface_usable(interface_name):
print(f"Interface {interface_name} already exists. Skipping creation.")
interfaces.append(interface_name)
continue
else:
subprocess.run(sudo_cmd + ["ip", "link", "delete", interface_name])
# Now add the interface
subprocess.run(sudo_cmd + ["ip", "link", "add", "link", base_interface, interface_name, "type", "macvlan", "mode", "bridge"])
subprocess.run(sudo_cmd + ["ip", "link", "set", interface_name, "up"])
#subprocess.run(sudo_cmd + ["dhclient", "-6", "-nw", interface_name])
interfaces.append(interface_name)
return interfaces
def delete_ipv6_addresses(base_interface='eth0'):
sudo_cmd = ["sudo"] if not is_root() else []
existing_interfaces = list(get_existing_interfaces(base_interface).keys())
for interface_name in existing_interfaces:
subprocess.run(sudo_cmd + ["ip", "link", "delete", interface_name])
def stop_proxy_servers(base_interface='eth0', delete_interface=True):
    # remove the redirect from POSTROUTING to the custom chain
    execute_ip6tables_command('ip6tables -t nat -D POSTROUTING -j FAKE_IPV6_CHAIN')
    # delete the custom chain
    execute_ip6tables_command('ip6tables -t nat -X FAKE_IPV6_CHAIN')
if delete_interface:
print("正在关闭代理服务器...")
print("删除IPv6地址...")
delete_ipv6_addresses(base_interface)
print("代理服务器已关闭.")
else:
print("正在关闭代理服务器...")
print("代理服务器已关闭.")
def start_proxy_servers(n, mode='normal', base_interface='eth0', delete_interface=True):
global iface_ipv6_dict
interfaces = create_ipv6_addresses(n, base_interface, delete_interface)
    # collect the created interfaces and their IPv6 addresses
iface_ipv6_dict = get_existing_interfaces(base_interface)
if iface_ipv6_dict:
        # remove any existing redirect from POSTROUTING to the custom chain
        execute_ip6tables_command('ip6tables -t nat -D POSTROUTING -j FAKE_IPV6_CHAIN')
        # delete the custom chain
        execute_ip6tables_command('ip6tables -t nat -X FAKE_IPV6_CHAIN')
        # create the custom chain
        execute_ip6tables_command('ip6tables -t nat -N FAKE_IPV6_CHAIN')
        # redirect outgoing traffic on the base interface to the custom chain
        execute_ip6tables_command(f'ip6tables -t nat -A POSTROUTING -o {base_interface} -j FAKE_IPV6_CHAIN')
if mode == 'normal':
selected_interface = list(iface_ipv6_dict.keys())[0]
ipv6_address = iface_ipv6_dict[selected_interface]
            # add the SNAT rule
execute_ip6tables_command(f'ip6tables -t nat -A FAKE_IPV6_CHAIN -j SNAT --to-source {ipv6_address}')
print(f"Using interface: {selected_interface}, Connecting to: {ipv6_address}")
elif mode == 'random':
for index, (interface, ipv6_address) in enumerate(iface_ipv6_dict.items()):
adjusted_probability = 1/(len(iface_ipv6_dict)-index)
execute_ip6tables_command(f'ip6tables -t nat -A FAKE_IPV6_CHAIN -m statistic --mode random --probability {adjusted_probability} -j SNAT --to-source {ipv6_address}')
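
A minimal usage sketch for the helpers above, assuming the file is saved as ipv6_proxy.py (the module name is hypothetical) and is run with root privileges on a host whose eth0 carries multiple routable IPv6 addresses:

```python
# Hypothetical driver script; ipv6_proxy is the module defined above.
import time
import ipv6_proxy

if __name__ == "__main__":
    try:
        # create 5 macvlan sub-interfaces on eth0 and SNAT outgoing traffic through the first one
        ipv6_proxy.start_proxy_servers(5, mode='normal', base_interface='eth0')
        for _ in range(10):
            time.sleep(60)
            # rotate to the next source address once a minute
            ipv6_proxy.switch_proxy_server(mode='normal')
    finally:
        # tear down the ip6tables rules and the extra interfaces
        ipv6_proxy.stop_proxy_servers(base_interface='eth0')
```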

@ -0,0 +1,73 @@
import pandas as pd
import os
from datetime import datetime, timedelta
def get_departure_destination(file_name):
name_without_extension = os.path.splitext(file_name)[0]
return name_without_extension
def merge_csv_files(csv_files, output_xlsx):
all_dfs = []
for csv_file in csv_files:
df = pd.read_csv(csv_file)
        # add the departure-date column (taken from the grandparent folder name)
        date = os.path.basename(os.path.dirname(os.path.dirname(csv_file)))
        df['出发日期'] = date
        # keep only the selected columns
selected_columns = [
'航班号','出发城市','到达城市', '航空公司', '出发日期', '出发时间', '到达时间',
'中转信息', 'economy_origin', '经济舱餐食信息', '经济舱座椅间距', '出发延误时间'
]
df = df[selected_columns]
        # rename 'economy_origin' to '票价' (fare)
df = df.rename(columns={'economy_origin': '票价'})
all_dfs.append(df)
    # concatenate all DataFrames
    merged_df = pd.concat(all_dfs, ignore_index=True)
    # save as an Excel file
    merged_df.to_excel(output_xlsx, index=False, engine='openpyxl')
# date range
start_date = datetime(2024, 11, 12)# start date
end_date = datetime(2024, 11, 19)# end date
clawer_date = datetime(2024, 11, 12)# crawl date
# input and output folder paths
input_base_path = "./"
output_folder = "./xlsx_output"
# make sure the output folder exists
if not os.path.exists(output_folder):
os.makedirs(output_folder)
# group CSV files that share the same origin and destination
route_files = {}
current_date = start_date
while current_date <= end_date:
folder_name = current_date.strftime("%Y-%m-%d")
folder_path = os.path.join(input_base_path, folder_name, clawer_date.strftime("%Y-%m-%d"))
if os.path.exists(folder_path):
for file_name in os.listdir(folder_path):
if file_name.endswith('.csv'):
csv_path = os.path.join(folder_path, file_name)
route = get_departure_destination(file_name)
if route not in route_files:
route_files[route] = []
route_files[route].append(csv_path)
current_date += timedelta(days=1)
# merge and save the files for each route
for route, files in route_files.items():
output_xlsx = os.path.join(output_folder, f"{route}.xlsx")
merge_csv_files(files, output_xlsx)
print(f"已合并并保存路线: {route} -> {output_xlsx}")
print("所有CSV文件已成功合并为XLSX文件并筛选了指定的列")

File diff suppressed because it is too large.

@ -0,0 +1,90 @@
import pandas as pd
import mysql.connector
from mysql.connector import Error
import os
from datetime import datetime, timedelta
# database connection settings
db_config = {
    'host': '152.136.166.253', # host only; the port is specified separately
    'port': 8989, # port
    'database': 'fly_ticket',
    'user': 'root',
    'password': 'Cauc@2024'
}
def import_csv_to_db(file_path, cursor):
df = pd.read_csv(file_path)
for index, row in df.iterrows():
sql = """INSERT INTO flight (f_n, f_s_p, f_a_p, f_s_a, f_a_a, f_s_t, f_a_t, f_Date, f_Delay, f_p, f_food, f_wide, f_depcode, f_dstcode)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE
f_s_p = VALUES(f_s_p),
f_a_p = VALUES(f_a_p),
f_s_a = VALUES(f_s_a),
f_a_a = VALUES(f_a_a),
f_s_t = VALUES(f_s_t),
f_a_t = VALUES(f_a_t),
f_Delay = VALUES(f_Delay),
f_p = VALUES(f_p),
f_food = VALUES(f_food),
f_wide = VALUES(f_wide),
f_depcode = VALUES(f_depcode),
f_dstcode = VALUES(f_dstcode);"""
values = (
row['航班号'],
row['出发城市'],
row['到达城市'],
row['出发机场'],
row['到达机场'],
row['出发时间'],
row['到达时间'],
row['出发日期'],
row['出发延误时间'],
row['economy_origin'],
row['经济舱餐食信息'],
row['经济舱座椅间距'],
row['出发机场三字码'],
row['到达机场三字码']
)
cursor.execute(sql, values)
try:
    # connect to the database
conn = mysql.connector.connect(**db_config)
if conn.is_connected():
cursor = conn.cursor()
        # date range of folders to import
start_date = datetime(2024, 11, 12)
end_date = datetime(2024, 11, 20)
current_date = start_date
while current_date <= end_date:
folder_name = current_date.strftime("%Y-%m-%d")
            folder_path = os.path.join(r"D:\college\SE2\Ctrip-Crawler-main\Ctrip-Crawler-withComfortInfo", folder_name, "2024-11-12") # raw string avoids invalid escape sequences in the Windows path
if os.path.exists(folder_path):
for file_name in os.listdir(folder_path):
if file_name.endswith('.csv'):
file_path = os.path.join(folder_path, file_name)
import_csv_to_db(file_path, cursor)
print(f"已导入文件: {file_path}")
current_date += timedelta(days=1)
        # commit the changes
conn.commit()
print("所有数据成功插入到数据库")
except Error as e:
print(f"连接数据库时出错: {e}")
finally:
if 'conn' in locals() and conn.is_connected():
cursor.close()
conn.close()
print("数据库连接已关闭")

@ -0,0 +1,412 @@
import io
import os
import gzip
import time
import json
import random
import requests
import threading
import pandas as pd
from seleniumwire import webdriver
from datetime import datetime as dt,timedelta
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException,StaleElementReferenceException,ElementNotInteractableException,ElementClickInterceptedException # exception classes used for error handling
def getcitycode():
cityname,code=[],[]
    # use Ctrip's city/airport code API
city_url='https://flights.ctrip.com/online/api/poi/get?v='+str(random.random())
headers={
'dnt':'1',
'referer':'https://verify.ctrip.com/',
'user-agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36'
}
r=requests.get(city_url,headers=headers)
citys=json.loads(r.text).get('data')
for city in citys:
if city =='热门':
continue
for key in city:
try:
for k in citys[city][key]:
cityname.append(k['display'])
code.append(k['data'])
except:
continue
citycode=dict(zip(cityname,code))
return cityname,citycode
class FLIGHT(object):
def __init__(self):
        self.url = 'https://flights.ctrip.com/online/list/oneway' # Ctrip one-way flight search page
        self.chromeDriverPath = 'C:/Program Files/Google/Chrome/Application/chromedriver' # chromedriver path
        self.options = webdriver.ChromeOptions() # create a Chrome options object
        #self.options.add_argument('--incognito') # incognito mode
        #self.options.add_argument('User-Agent=%s'%UserAgent().random) # randomize the User-Agent
        self.options.add_argument("--disable-blink-features")
        self.options.add_argument("--disable-blink-features=AutomationControlled")
        self.options.add_experimental_option("excludeSwitches", ['enable-automation'])# hide the "controlled by automated test software" banner
        self.driver = webdriver.Chrome(executable_path=self.chromeDriverPath,chrome_options=self.options)
        self.driver.maximize_window()
        self.err=0# retry counter for errors
def getpage(self):
        ############## get the city codes
        self.startcode=self.citycode[self.city[0]][-3:]
        self.endcode=self.citycode[self.city[1]][-3:]
        ############## build the query URL
        flights_url=self.url+'-'+self.startcode+'-'+self.endcode+'?&depdate='+self.date
        print(flights_url)
        ############## set the page-load timeout
self.driver.set_page_load_timeout(300)
try:
self.driver.get(flights_url)
except:
print('页面连接失败')
self.driver.close()
self.getpage()
else:
try:
                ############## check whether a CAPTCHA page is shown
self.driver.find_element(By.CLASS_NAME,"basic-alert.alert-giftinfo")
print('等待2小时后重试')
time.sleep(7200)
self.getpage()
except:
                ############## no CAPTCHA, go to the next step
self.remove_btn()
def remove_btn(self):
try:
js_remove="$('.notice-box').remove();"
self.driver.execute_script(js_remove)
except Exception as e:
print('防疫移除失败',e)
else:
self.changecity()
def changecity(self):
try:
            # locate the departure and destination input fields
            its=self.driver.find_elements(By.CLASS_NAME,'form-input-v3')
            # if the departure field does not match the target city, change it
while self.city[0] not in its[0].get_attribute('value'):
its[0].click()
time.sleep(0.5)
its[0].send_keys(Keys.CONTROL + 'a')
time.sleep(0.5)
its[0].send_keys(self.city[0])
time.sleep(0.5)
            # if the destination field does not match the target city, change it
while self.city[1] not in its[1].get_attribute('value'):
its[1].click()
time.sleep(0.5)
its[1].send_keys(Keys.CONTROL + 'a')
time.sleep(0.5)
its[1].send_keys(self.city[1])
time.sleep(0.5)
try:
                # click the low-price-reminder button to trigger the search (acts like pressing Enter)
self.driver.implicitly_wait(5) # seconds
self.driver.find_elements(By.CLASS_NAME,'low-price-remind')[0].click()
except IndexError as e:
print('\n更换城市错误 找不到元素',e)
                # fall back to pressing Enter, just in case
its[1].send_keys(Keys.ENTER)
print('\n更换城市成功',self.city[0]+'-'+self.city[1])
except (ElementNotInteractableException,StaleElementReferenceException,ElementClickInterceptedException,ElementClickInterceptedException) as e:
print('\n更换城市错误 元素错误',e)
self.err+=1
if self.err<=5:
self.click_btn()
else:
self.err=0
del self.driver.requests
self.getpage()
except Exception as e:
print('\n更换城市错误',e)
            # drop this request
            del self.driver.requests
            # restart from the beginning
            self.getpage()
        else:
            # no errors, go to the next step
self.err=0
self.getdata()
def getdata(self):
try:
            # wait for the search response to finish loading
self.predata = self.driver.wait_for_request('/international/search/api/search/batchSearch?.*', timeout=60)
rb=dict(json.loads(self.predata.body).get('flightSegments')[0])
except TimeoutException as e:
            print('\n获取数据错误',e)
            # drop this request
            del self.driver.requests
            # restart from the beginning
self.getpage()
else:
            # verify that the response matches the requested cities
if rb['departureCityName'] == self.city[0] and rb['arrivalCityName'] == self.city[1]:
print('城市获取正确')
                # drop this request
                del self.driver.requests
                # no errors, go to the next step
                self.decode_data()
            else:
                # drop this request
                del self.driver.requests
                # change cities and retry
self.changecity()
def decode_data(self):
try:
buf = io.BytesIO(self.predata.response.body)
gf = gzip.GzipFile(fileobj = buf)
self.dedata = gf.read().decode('UTF-8')
self.dedata=json.loads(self.dedata)
except:
print('重新获取数据')
self.getpage()
else:
            # no errors, go to the next step
self.check_data()
def check_data(self):
try:
self.flightItineraryList=self.dedata['data']['flightItineraryList']
            # iterate in reverse and drop connecting flights
for i in range(len(self.flightItineraryList)-1, -1, -1):
if self.flightItineraryList[i]['flightSegments'][0]['transferCount'] !=0:
self.flightItineraryList.pop(i)
if len(self.flightItineraryList):
                # direct flights exist, go to the next step
self.muti_process()
else:
print('不存在直航航班')
return 0
except:
print('不存在直航航班')
return 0
def muti_process(self):
processes = []
self.flights = pd.DataFrame()
self.prices = pd.DataFrame()
        # process flight information
        processes.append(threading.Thread(target=self.proc_flightSegments))
        # process fare information
processes.append(threading.Thread(target=self.proc_priceList))
for pro in processes:
pro.start()
for pro in processes:
pro.join()
        # no errors, go to the next step
self.mergedata()
def proc_flightSegments(self):
for flightlist in self.flightItineraryList:
flightlist=flightlist['flightSegments'][0]['flightList']
flightUnitList=dict(flightlist[0])
departureday=flightUnitList['departureDateTime'].split(' ')[0]
departuretime=flightUnitList['departureDateTime'].split(' ')[1]
arrivalday=flightUnitList['arrivalDateTime'].split(' ')[0]
arrivaltime=flightUnitList['arrivalDateTime'].split(' ')[1]
            # drop fields that are not needed
dellist=['sequenceNo', 'marketAirlineCode',
'departureProvinceId','departureCityId','departureCityCode','departureAirportShortName','departureTerminal',
'arrivalProvinceId','arrivalCityId','arrivalCityCode','arrivalAirportShortName','arrivalTerminal',
'transferDuration','stopList','leakedVisaTagSwitch','trafficType','highLightPlaneNo','mealType',
'operateAirlineCode','arrivalDateTime','departureDateTime','operateFlightNo','operateAirlineName']
for value in dellist:
try:
flightUnitList.pop(value)
except:
continue
            # split the datetimes into separate date and time fields
flightUnitList.update({'departureday': departureday, 'departuretime': departuretime,
'arrivalday': arrivalday, 'arrivaltime': arrivaltime})
self.flights=pd.concat([self.flights,pd.DataFrame(flightUnitList,index=[0])],ignore_index=True)
def proc_priceList(self):
for flightlist in self.flightItineraryList:
flightNo=flightlist['itineraryId'].split('_')[0]
priceList=flightlist['priceList']
            # economy fares and their discounts
            economy,economy_discount=[],[]
            # business fares and their discounts
            bussiness,bussiness_discount=[],[]
for price in priceList:
adultPrice=price['adultPrice']
cabin=price['cabin']
priceUnitList=dict(price['priceUnitList'][0]['flightSeatList'][0])
discountRate=priceUnitList['discountRate']
                # economy class
if cabin=='Y':
economy.append(adultPrice)
economy_discount.append(discountRate)
                # business class
elif cabin=='C':
bussiness.append(adultPrice)
bussiness_discount.append(discountRate)
if economy !=[]:
try:
economy_origin=economy[economy_discount.index(1)]
except:
economy_origin=int(max(economy)/max(economy_discount))
if min(economy_discount) !=1:
economy_low=min(economy)
economy_cut=min(economy_discount)
else:
economy_low=''
economy_cut=''
else:
economy_origin=''
economy_low=''
economy_cut=''
if bussiness !=[]:
try:
bussiness_origin=bussiness[bussiness_discount.index(1)]
except:
bussiness_origin=int(max(bussiness)/max(bussiness_discount))
if min(bussiness_discount) !=1:
bussiness_low=min(bussiness)
bussiness_cut=min(bussiness_discount)
else:
bussiness_low=''
bussiness_cut=''
else:
bussiness_origin=''
bussiness_low=''
bussiness_cut=''
price_info={'flightNo':flightNo,
'economy_origin':economy_origin,'economy_low':economy_low,'economy_cut':economy_cut,
'bussiness_origin':bussiness_origin,'bussiness_low':bussiness_low,'bussiness_cut':bussiness_cut}
#self.prices=self.prices.append(price_info,ignore_index=True)
self.prices=pd.concat([self.prices,pd.DataFrame(price_info,index=[0])],ignore_index=True)
def mergedata(self):
try:
self.df = self.flights.merge(self.prices,on=['flightNo'])
self.df['数据获取日期']=dt.now().strftime('%Y-%m-%d')
            # rename the DataFrame columns
order=['数据获取日期','航班号','航空公司',
'出发日期','出发时间','到达日期','到达时间','飞行时长','出发国家','出发城市','出发机场','出发机场三字码',
'到达国家','到达城市','到达机场','到达机场三字码','飞机型号','飞机尺寸','飞机型号三字码',
'经济舱原价','经济舱最低价','经济舱折扣','商务舱原价','商务舱最低价','商务舱折扣',
'到达准点率','停留次数']
origin=['数据获取日期','flightNo','marketAirlineName',
'departureday','departuretime','arrivalday','arrivaltime','duration',
'departureCountryName','departureCityName','departureAirportName','departureAirportCode',
'arrivalCountryName','arrivalCityName','arrivalAirportName','arrivalAirportCode',
'aircraftName','aircraftSize','aircraftCode',
'economy_origin','economy_low','economy_cut',
'bussiness_origin','bussiness_low','bussiness_cut',
'arrivalPunctuality','stopCount']
columns=dict(zip(origin,order))
self.df=self.df.rename(columns=columns)
self.df = self.df[order]
if not os.path.exists(self.date):
os.makedirs(self.date)
filename=os.getcwd()+'\\'+self.date+'\\'+self.date+'-'+self.city[0]+'-'+self.city[1]+'.csv'
self.df.to_csv(filename,encoding='GB18030',index=False)
print('\n数据爬取完成',filename)
except Exception as e:
print('合并数据失败',e)
def demain(self,citys,citycode):
self.citycode=citycode
        # set the departure date (7 days from today)
self.date=dt.now()+timedelta(days=7)
self.date=self.date.strftime('%Y-%m-%d')
for city in citys:
self.city=city
if citys.index(city)==0:
                # first run
                self.getpage()
            else:
                # later runs only need to change the city pair
                self.changecity()
        # quit when finished
self.driver.quit()
if __name__ == '__main__':
citys=[]
cityname,citycode=getcitycode()
city=['上海','广州','深圳','北京']
ytic=list(reversed(city))
for m in city:
for n in ytic:
if m==n:
continue
else:
citys.append([m,n])
fly = FLIGHT()
fly.demain(citys,citycode)
print('\n程序运行完成!!!!')

@ -0,0 +1,397 @@
import io
import os
import gzip
import time
import json
import threading
import pandas as pd
from seleniumwire import webdriver
from datetime import datetime as dt,timedelta
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException,StaleElementReferenceException,ElementNotInteractableException,ElementClickInterceptedException # exception classes used for error handling
class FLIGHT(object):
def __init__(self):
        self.chromeDriverPath = 'C:/Program Files/Google/Chrome/Application/chromedriver' # chromedriver path
        self.options = webdriver.ChromeOptions() # create a Chrome options object
        self.options.add_argument('--incognito') # incognito mode
        self.options.add_argument("--disable-blink-features")
        self.options.add_argument("--disable-blink-features=AutomationControlled")
        self.options.add_experimental_option("excludeSwitches", ['enable-automation'])# hide the "controlled by automated test software" banner
        self.driver = webdriver.Chrome(executable_path=self.chromeDriverPath,chrome_options=self.options)
        self.driver.set_page_load_timeout(300)# page-load timeout
        self.driver.maximize_window()
        self.err=0# retry counter for errors
        # go to the home page
        self.driver.get('https://flights.ctrip.com/online/channel/domestic')
def getpage(self):
try:
            self.driver.find_element(By.CLASS_NAME,'pc_home-jipiao').click()# click the flight icon to return to the main search page
self.driver.implicitly_wait(5) # seconds
            self.driver.find_elements(By.CLASS_NAME,'radio-label')[0].click()# one-way
while self.driver.find_elements(By.CSS_SELECTOR,"[aria-label=请选择日期]")[0].get_attribute("value") != self.date:
                self.driver.find_element(By.CLASS_NAME,'modifyDate.depart-date').click()# open the date picker
for m in self.driver.find_elements(By.CLASS_NAME,'date-picker.date-picker-block'):
if int(m.find_element(By.CLASS_NAME,'month').text[:-1]) != int(self.date[5:7]):
continue
for d in m.find_elements(By.CLASS_NAME,'date-d'):
if int(d.text) == int(self.date[-2:]):
d.click()
break
            self.driver.find_element(By.CLASS_NAME,'search-btn').click()# search
except:
print('页面连接失败')
self.driver.close()
self.getpage()
else:
try:
                ############## check whether a CAPTCHA page is shown
self.driver.find_element(By.ID,"verification-code")
print('等待2小时后重试')
time.sleep(7200)
self.getpage()
except:
                ############## no CAPTCHA, go to the next step
self.changecity()
def remove_btn(self):
try:
js_remove="$('.notice-box').remove();"
self.driver.execute_script(js_remove)
except Exception as e:
print('防疫移除失败',e)
def changecity(self):
        # remove the epidemic notice overlay
self.remove_btn()
try:
            # locate the departure and destination input fields
            its=self.driver.find_elements(By.CLASS_NAME,'form-input-v3')
            # if the departure field does not match the target city, change it
while self.city[0] not in its[0].get_attribute('value'):
its[0].click()
time.sleep(0.5)
its[0].send_keys(Keys.CONTROL + 'a')
time.sleep(0.5)
its[0].send_keys(self.city[0])
time.sleep(0.5)
            # if the destination field does not match the target city, change it
while self.city[1] not in its[1].get_attribute('value'):
its[1].click()
time.sleep(0.5)
its[1].send_keys(Keys.CONTROL + 'a')
time.sleep(0.5)
its[1].send_keys(self.city[1])
time.sleep(0.5)
try:
                # click the low-price-reminder button to trigger the search (acts like pressing Enter)
self.driver.implicitly_wait(5) # seconds
self.driver.find_elements(By.CLASS_NAME,'low-price-remind')[0].click()
except IndexError as e:
print('\n更换城市错误 找不到元素',e)
                # fall back to pressing Enter, just in case
its[1].send_keys(Keys.ENTER)
print('\n更换城市成功',self.city[0]+'-'+self.city[1])
        # catch element-related errors
except (IndexError,ElementNotInteractableException,StaleElementReferenceException,ElementClickInterceptedException,ElementClickInterceptedException) as e:
print('\n更换城市错误 元素错误',e)
self.err+=1
if self.err<=5:
self.changecity()
else:
self.err=0
del self.driver.requests
self.getpage()
except Exception as e:
print('\n更换城市错误',e)
            # drop this request
            del self.driver.requests
            # restart from the beginning
            self.getpage()
        else:
            # no errors, go to the next step
self.err=0
self.getdata()
def getdata(self):
try:
            # wait for the search response to finish loading
self.predata = self.driver.wait_for_request('/international/search/api/search/batchSearch?.*', timeout=30)
rb=dict(json.loads(self.predata.body).get('flightSegments')[0])
except TimeoutException as e:
            print('\n获取数据错误',e)
            # drop this request
            del self.driver.requests
            # restart from the beginning
self.getpage()
else:
            # verify that the response matches the requested cities
if rb['departureCityName'] == self.city[0] and rb['arrivalCityName'] == self.city[1]:
print('城市获取正确')
                # drop this request
                del self.driver.requests
                # no errors, go to the next step
                self.decode_data()
            else:
                # drop this request
                del self.driver.requests
                # change cities and retry
self.changecity()
def decode_data(self):
try:
buf = io.BytesIO(self.predata.response.body)
gf = gzip.GzipFile(fileobj = buf)
self.dedata = gf.read().decode('UTF-8')
self.dedata=json.loads(self.dedata)
except:
print('重新获取数据')
self.getpage()
else:
            # no errors, go to the next step
self.check_data()
def check_data(self):
try:
self.flightItineraryList=self.dedata['data']['flightItineraryList']
            # iterate in reverse and drop connecting flights
for i in range(len(self.flightItineraryList)-1, -1, -1):
if self.flightItineraryList[i]['flightSegments'][0]['transferCount'] !=0:
self.flightItineraryList.pop(i)
if len(self.flightItineraryList):
                # direct flights exist, go to the next step
self.muti_process()
else:
print('不存在直航航班')
return 0
except:
print('不存在直航航班')
return 0
def muti_process(self):
processes = []
self.flights = pd.DataFrame()
self.prices = pd.DataFrame()
        # process flight information
        processes.append(threading.Thread(target=self.proc_flightSegments))
        # process fare information
processes.append(threading.Thread(target=self.proc_priceList))
for pro in processes:
pro.start()
for pro in processes:
pro.join()
        # no errors, go to the next step
self.mergedata()
def proc_flightSegments(self):
for flightlist in self.flightItineraryList:
flightlist=flightlist['flightSegments'][0]['flightList']
flightUnitList=dict(flightlist[0])
departureday=flightUnitList['departureDateTime'].split(' ')[0]
departuretime=flightUnitList['departureDateTime'].split(' ')[1]
arrivalday=flightUnitList['arrivalDateTime'].split(' ')[0]
arrivaltime=flightUnitList['arrivalDateTime'].split(' ')[1]
            # drop fields that are not needed
dellist=['sequenceNo', 'marketAirlineCode',
'departureProvinceId','departureCityId','departureCityCode','departureAirportShortName','departureTerminal',
'arrivalProvinceId','arrivalCityId','arrivalCityCode','arrivalAirportShortName','arrivalTerminal',
'transferDuration','stopList','leakedVisaTagSwitch','trafficType','highLightPlaneNo','mealType',
'operateAirlineCode','arrivalDateTime','departureDateTime','operateFlightNo','operateAirlineName']
for value in dellist:
try:
flightUnitList.pop(value)
except:
continue
            # split the datetimes into separate date and time fields
flightUnitList.update({'departureday': departureday, 'departuretime': departuretime,
'arrivalday': arrivalday, 'arrivaltime': arrivaltime})
self.flights=pd.concat([self.flights,pd.DataFrame(flightUnitList,index=[0])],ignore_index=True)
def proc_priceList(self):
for flightlist in self.flightItineraryList:
flightNo=flightlist['itineraryId'].split('_')[0]
priceList=flightlist['priceList']
            # economy fares and their discounts
            economy,economy_discount=[],[]
            # business fares and their discounts
            bussiness,bussiness_discount=[],[]
for price in priceList:
adultPrice=price['adultPrice']
cabin=price['cabin']
priceUnitList=dict(price['priceUnitList'][0]['flightSeatList'][0])
discountRate=priceUnitList['discountRate']
                # economy class
if cabin=='Y':
economy.append(adultPrice)
economy_discount.append(discountRate)
                # business class
elif cabin=='C':
bussiness.append(adultPrice)
bussiness_discount.append(discountRate)
if economy !=[]:
try:
economy_origin=economy[economy_discount.index(1)]
except:
economy_origin=int(max(economy)/max(economy_discount))
if min(economy_discount) !=1:
economy_low=min(economy)
economy_cut=min(economy_discount)
else:
economy_low=''
economy_cut=''
else:
economy_origin=''
economy_low=''
economy_cut=''
if bussiness !=[]:
try:
bussiness_origin=bussiness[bussiness_discount.index(1)]
except:
bussiness_origin=int(max(bussiness)/max(bussiness_discount))
if min(bussiness_discount) !=1:
bussiness_low=min(bussiness)
bussiness_cut=min(bussiness_discount)
else:
bussiness_low=''
bussiness_cut=''
else:
bussiness_origin=''
bussiness_low=''
bussiness_cut=''
price_info={'flightNo':flightNo,
'economy_origin':economy_origin,'economy_low':economy_low,'economy_cut':economy_cut,
'bussiness_origin':bussiness_origin,'bussiness_low':bussiness_low,'bussiness_cut':bussiness_cut}
#self.prices=self.prices.append(price_info,ignore_index=True)
self.prices=pd.concat([self.prices,pd.DataFrame(price_info,index=[0])],ignore_index=True)
def mergedata(self):
try:
self.df = self.flights.merge(self.prices,on=['flightNo'])
self.df['数据获取日期']=dt.now().strftime('%Y-%m-%d')
            # rename the DataFrame columns
order=['数据获取日期','航班号','航空公司',
'出发日期','出发时间','到达日期','到达时间','飞行时长','出发国家','出发城市','出发机场','出发机场三字码',
'到达国家','到达城市','到达机场','到达机场三字码','飞机型号','飞机尺寸','飞机型号三字码',
'经济舱原价','经济舱最低价','经济舱折扣','商务舱原价','商务舱最低价','商务舱折扣',
'到达准点率','停留次数']
origin=['数据获取日期','flightNo','marketAirlineName',
'departureday','departuretime','arrivalday','arrivaltime','duration',
'departureCountryName','departureCityName','departureAirportName','departureAirportCode',
'arrivalCountryName','arrivalCityName','arrivalAirportName','arrivalAirportCode',
'aircraftName','aircraftSize','aircraftCode',
'economy_origin','economy_low','economy_cut',
'bussiness_origin','bussiness_low','bussiness_cut',
'arrivalPunctuality','stopCount']
columns=dict(zip(origin,order))
self.df=self.df.rename(columns=columns)
self.df = self.df[order]
if not os.path.exists(self.date):
os.makedirs(self.date)
filename=os.getcwd()+'\\'+self.date+'\\'+self.date+'-'+self.city[0]+'-'+self.city[1]+'.csv'
self.df.to_csv(filename,encoding='GB18030',index=False)
print('\n数据爬取完成',filename)
except Exception as e:
print('合并数据失败',e)
def demain(self,citys):
        # set the departure date (tomorrow)
self.date=dt.now()+timedelta(days=1)
self.date=self.date.strftime('%Y-%m-%d')
for city in citys:
self.city=city
if citys.index(city)==0:
                # first run
                self.getpage()
            else:
                # later runs only need to change the city pair
                self.changecity()
        # quit when finished
self.driver.quit()
if __name__ == '__main__':
citys=[]
city=['上海','广州','深圳','北京']
    # build city pairs
ytic=list(reversed(city))
for m in city:
for n in ytic:
if m==n:
continue
else:
citys.append([m,n])
fly = FLIGHT()
fly.demain(citys)
print('\n程序运行完成!!!!')

@ -0,0 +1,143 @@
import requests
import datetime
import re
import demjson
import time
import pandas as pd
def create_assist_date(datestart = None,dateend = None):
    # build a helper list of dates
if datestart is None:
datestart = '2020-01-01'
if dateend is None:
dateend = (datetime.datetime.now()+datetime.timedelta(days=-1)).strftime('%Y-%m-%d')
    # convert to datetime objects
datestart=datetime.datetime.strptime(datestart,'%Y-%m-%d')
dateend=datetime.datetime.strptime(dateend,'%Y-%m-%d')
date_list = []
date_list.append(datestart.strftime('%Y-%m-%d'))
while datestart<dateend:
        # advance one day
        datestart+=datetime.timedelta(days=+1)
        # append the date string to the list
date_list.append(datestart.strftime('%Y-%m-%d'))
return date_list
def getdata(citys,dateseries):
url='https://www.lsjpjg.com/getthis.php'
headers={
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Host': 'www.lsjpjg.com',
'Origin': 'https://www.lsjpjg.com',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4647.116 Safari/537.36',
'X-Requested-With': 'XMLHttpRequest'
}
for city in citys:
df=pd.DataFrame()
err=0
for date in dateseries:
data={'dep_dt': date,'dep_ct': city[0],'arr_ct': city[1]}
res=requests.post(url, headers=headers,data=data)
            # check whether this route keeps returning no flights
            if res.text=='\ufeff[]' :
                print(city,'无航班',date)
                err+=1
                # abort this route once the miss count exceeds the threshold
if err>30:
break
continue
else:
err-=1
print(city,date)
res.encoding=res.apparent_encoding
NewResponse = re.sub(r"/","",res.text)
try:
r=NewResponse.encode('utf-8')
j=demjson.decode(r)
except:
continue
temp=pd.DataFrame(j)
try:
temp.drop('icon',axis=1,inplace=True)
temp['出发日期']=date
except:
continue
df=pd.concat([df,temp])
time.sleep(0.5)
filename=city[0]+'-'+city[1]
        # process the raw data
proc_data(filename,df,interval=8)
def proc_data(filename,df,interval=8):
    # save the raw data locally
df.to_csv(filename+'.csv',encoding='GB18030')
df['全票价']=0
df['日期差']=None
for i in df.index:
try:
if not '经济' in df['discount'][i]:
df.drop(index=i,inplace=True)
            elif '折' in df['discount'][i]: # discounted fare
                # skip rows where the gap between departure date and query date exceeds the threshold
delta=datetime.datetime.strptime(df['出发日期'][i],'%Y-%m-%d')-datetime.datetime.strptime(df['qry_dt'][i],'%Y-%m-%d')
if delta.days >interval:
df.drop(index=i,inplace=True)
continue
else:
df.loc[i,'日期差']=delta.days
                    # derive the full fare from the discount rate
                    discount=float(re.findall(r'\d+\.?\d*',df['discount'][i])[0])
full_price=df['price'][i]/discount*10
df.loc[i,'全票价']=full_price
            elif ('全价' in df['discount'][i]) or ('经典' in df['discount'][i]): # full fare or "classic" fare
                # skip rows where the gap between departure date and query date exceeds the threshold
delta=datetime.datetime.strptime(df['出发日期'][i],'%Y-%m-%d')-datetime.datetime.strptime(df['qry_dt'][i],'%Y-%m-%d')
if delta.days >interval:
df.drop(index=i,inplace=True)
continue
else:
df.loc[i,'日期差']=delta.days
                    # full fare
full_price=df['price'][i]
df.loc[i,'全票价']=full_price
except:
df.drop(index=i,inplace=True)
avg_full_price=df[df['全票价']!=0].groupby(['出发日期'])[['全票价']].mean()
avg_price=df[df['全票价']!=df['price']].groupby(['出发日期'])[['price']].mean()
result=pd.concat([avg_price,avg_full_price],axis=1)
result['折扣']=result['price']/result['全票价']
    # save the processed data locally
    result.to_csv('result-'+filename+'.csv',encoding='GB18030')
if __name__ == '__main__':
citys=[]
    # set the start and end dates
dateseries=create_assist_date(datestart = None,dateend = None)
city=['上海','广州','深圳','北京']
ytic=list(reversed(city))
for m in city:
for n in ytic:
if m==n:
continue
else:
citys.append([m,n])
getdata(citys,dateseries)

@ -0,0 +1,17 @@
# must be unique in a given SonarQube instance
sonar.projectKey=clawer
# --- optional properties ---
# defaults to project key
sonar.projectName=clawer
# defaults to 'not provided'
#sonar.projectVersion=1.0
# Path is relative to the sonar-project.properties file. Defaults to .
#sonar.sources=src,WebContent
# Encoding of the source code. Default is default system encoding
sonar.sourceEncoding=UTF-8
#sonar.java.binaries=target/classes/javabean,target/classes/servlet

File diff suppressed because one or more lines are too long

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry including="**/*.java" kind="src" output="target/classes" path="src">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="lib" path="WebContent/WEB-INF/lib/commons-fileupload-1.4.jar"/>
<classpathentry kind="lib" path="WebContent/WEB-INF/lib/commons-io-2.6.jar"/>
<classpathentry kind="lib" path="WebContent/WEB-INF/lib/json-20240303.jar"/>
<classpathentry kind="lib" path="WebContent/WEB-INF/lib/mysql-connector-java-8.0.16.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER">
<attributes>
<attribute name="module" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jst.server.core.container/org.eclipse.jst.server.tomcat.runtimeTarget/Apache Tomcat v9.0"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="jdk21" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/classes" />
</component>
</project>

@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>air_ticket_book</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.common.project.facet.core.builder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.validation.validationbuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
</natures>
</projectDescription>

@ -0,0 +1 @@
../atob/bin/atob.js

@ -0,0 +1 @@
../autoprefixer/bin/autoprefixer

@ -0,0 +1 @@
../esprima/bin/esparse.js

@ -0,0 +1 @@
../esprima/bin/esvalidate.js

@ -0,0 +1 @@
../gonzales-pe/bin/gonzales.js

@ -0,0 +1 @@
../js-yaml/bin/js-yaml.js

@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js

@ -0,0 +1 @@
../specificity/bin/specificity

@ -0,0 +1 @@
../stylelint/bin/stylelint.js

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2014-2018 Sebastian McKenzie <sebmck@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,19 @@
# @babel/code-frame
> Generate errors that contain a code frame that point to source locations.
See our website [@babel/code-frame](https://babeljs.io/docs/en/next/babel-code-frame.html) for more information.
## Install
Using npm:
```sh
npm install --save-dev @babel/code-frame
```
or using yarn:
```sh
yarn add @babel/code-frame --dev
```

@ -0,0 +1,173 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.codeFrameColumns = codeFrameColumns;
exports.default = _default;
function _highlight() {
const data = _interopRequireWildcard(require("@babel/highlight"));
_highlight = function () {
return data;
};
return data;
}
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
let deprecationWarningShown = false;
function getDefs(chalk) {
return {
gutter: chalk.grey,
marker: chalk.red.bold,
message: chalk.red.bold
};
}
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
function getMarkerLines(loc, source, opts) {
const startLoc = Object.assign({
column: 0,
line: -1
}, loc.start);
const endLoc = Object.assign({}, startLoc, loc.end);
const {
linesAbove = 2,
linesBelow = 3
} = opts || {};
const startLine = startLoc.line;
const startColumn = startLoc.column;
const endLine = endLoc.line;
const endColumn = endLoc.column;
let start = Math.max(startLine - (linesAbove + 1), 0);
let end = Math.min(source.length, endLine + linesBelow);
if (startLine === -1) {
start = 0;
}
if (endLine === -1) {
end = source.length;
}
const lineDiff = endLine - startLine;
const markerLines = {};
if (lineDiff) {
for (let i = 0; i <= lineDiff; i++) {
const lineNumber = i + startLine;
if (!startColumn) {
markerLines[lineNumber] = true;
} else if (i === 0) {
const sourceLength = source[lineNumber - 1].length;
markerLines[lineNumber] = [startColumn, sourceLength - startColumn];
} else if (i === lineDiff) {
markerLines[lineNumber] = [0, endColumn];
} else {
const sourceLength = source[lineNumber - i].length;
markerLines[lineNumber] = [0, sourceLength];
}
}
} else {
if (startColumn === endColumn) {
if (startColumn) {
markerLines[startLine] = [startColumn, 0];
} else {
markerLines[startLine] = true;
}
} else {
markerLines[startLine] = [startColumn, endColumn - startColumn];
}
}
return {
start,
end,
markerLines
};
}
function codeFrameColumns(rawLines, loc, opts = {}) {
const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight().shouldHighlight)(opts);
const chalk = (0, _highlight().getChalk)(opts);
const defs = getDefs(chalk);
const maybeHighlight = (chalkFn, string) => {
return highlighted ? chalkFn(string) : string;
};
if (highlighted) rawLines = (0, _highlight().default)(rawLines, opts);
const lines = rawLines.split(NEWLINE);
const {
start,
end,
markerLines
} = getMarkerLines(loc, lines, opts);
const hasColumns = loc.start && typeof loc.start.column === "number";
const numberMaxWidth = String(end).length;
let frame = lines.slice(start, end).map((line, index) => {
const number = start + 1 + index;
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
const gutter = ` ${paddedNumber} | `;
const hasMarker = markerLines[number];
const lastMarkerLine = !markerLines[number + 1];
if (hasMarker) {
let markerLine = "";
if (Array.isArray(hasMarker)) {
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
const numberOfMarkers = hasMarker[1] || 1;
markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
if (lastMarkerLine && opts.message) {
markerLine += " " + maybeHighlight(defs.message, opts.message);
}
}
return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line, markerLine].join("");
} else {
return ` ${maybeHighlight(defs.gutter, gutter)}${line}`;
}
}).join("\n");
if (opts.message && !hasColumns) {
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
}
if (highlighted) {
return chalk.reset(frame);
} else {
return frame;
}
}
function _default(rawLines, lineNumber, colNumber, opts = {}) {
if (!deprecationWarningShown) {
deprecationWarningShown = true;
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
if (process.emitWarning) {
process.emitWarning(message, "DeprecationWarning");
} else {
const deprecationError = new Error(message);
deprecationError.name = "DeprecationWarning";
console.warn(new Error(message));
}
}
colNumber = Math.max(colNumber, 0);
const location = {
start: {
column: colNumber,
line: lineNumber
}
};
return codeFrameColumns(rawLines, location, opts);
}

@ -0,0 +1,54 @@
{
"_args": [
[
"@babel/code-frame@7.0.0",
"/home/travis/build/SonarSource/sonar-css/sonar-css-plugin/css-bundle"
]
],
"_from": "@babel/code-frame@7.0.0",
"_id": "@babel/code-frame@7.0.0",
"_inBundle": false,
"_integrity": "sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==",
"_location": "/@babel/code-frame",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "@babel/code-frame@7.0.0",
"name": "@babel/code-frame",
"escapedName": "@babel%2fcode-frame",
"scope": "@babel",
"rawSpec": "7.0.0",
"saveSpec": null,
"fetchSpec": "7.0.0"
},
"_requiredBy": [
"/@babel/core",
"/@babel/template",
"/@babel/traverse"
],
"_resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0.tgz",
"_spec": "7.0.0",
"_where": "/home/travis/build/SonarSource/sonar-css/sonar-css-plugin/css-bundle",
"author": {
"name": "Sebastian McKenzie",
"email": "sebmck@gmail.com"
},
"dependencies": {
"@babel/highlight": "^7.0.0"
},
"description": "Generate errors that contain a code frame that point to source locations.",
"devDependencies": {
"chalk": "^2.0.0",
"strip-ansi": "^4.0.0"
},
"homepage": "https://babeljs.io/",
"license": "MIT",
"main": "lib/index.js",
"name": "@babel/code-frame",
"repository": {
"type": "git",
"url": "https://github.com/babel/babel/tree/master/packages/babel-code-frame"
},
"version": "7.0.0"
}

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2014-present Sebastian McKenzie and other contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,19 @@
# @babel/core
> Babel compiler core.
See our website [@babel/core](https://babeljs.io/docs/en/next/babel-core.html) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.
## Install
Using npm:
```sh
npm install --save-dev @babel/core
```
or using yarn:
```sh
yarn add @babel/core --dev
```

@ -0,0 +1,199 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.makeStrongCache = makeStrongCache;
exports.makeWeakCache = makeWeakCache;
exports.assertSimpleType = assertSimpleType;
function makeStrongCache(handler) {
return makeCachedFunction(new Map(), handler);
}
function makeWeakCache(handler) {
return makeCachedFunction(new WeakMap(), handler);
}
function makeCachedFunction(callCache, handler) {
return function cachedFunction(arg, data) {
let cachedValue = callCache.get(arg);
if (cachedValue) {
for (const _ref of cachedValue) {
const {
value,
valid
} = _ref;
if (valid(data)) return value;
}
}
const cache = new CacheConfigurator(data);
const value = handler(arg, cache);
if (!cache.configured()) cache.forever();
cache.deactivate();
switch (cache.mode()) {
case "forever":
cachedValue = [{
value,
valid: () => true
}];
callCache.set(arg, cachedValue);
break;
case "invalidate":
cachedValue = [{
value,
valid: cache.validator()
}];
callCache.set(arg, cachedValue);
break;
case "valid":
if (cachedValue) {
cachedValue.push({
value,
valid: cache.validator()
});
} else {
cachedValue = [{
value,
valid: cache.validator()
}];
callCache.set(arg, cachedValue);
}
}
return value;
};
}
class CacheConfigurator {
constructor(data) {
this._active = true;
this._never = false;
this._forever = false;
this._invalidate = false;
this._configured = false;
this._pairs = [];
this._data = data;
}
simple() {
return makeSimpleConfigurator(this);
}
mode() {
if (this._never) return "never";
if (this._forever) return "forever";
if (this._invalidate) return "invalidate";
return "valid";
}
forever() {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._never) {
throw new Error("Caching has already been configured with .never()");
}
this._forever = true;
this._configured = true;
}
never() {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._forever) {
throw new Error("Caching has already been configured with .forever()");
}
this._never = true;
this._configured = true;
}
using(handler) {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._never || this._forever) {
throw new Error("Caching has already been configured with .never or .forever()");
}
this._configured = true;
const key = handler(this._data);
this._pairs.push([key, handler]);
return key;
}
invalidate(handler) {
if (!this._active) {
throw new Error("Cannot change caching after evaluation has completed.");
}
if (this._never || this._forever) {
throw new Error("Caching has already been configured with .never or .forever()");
}
this._invalidate = true;
this._configured = true;
const key = handler(this._data);
this._pairs.push([key, handler]);
return key;
}
validator() {
const pairs = this._pairs;
return data => pairs.every(([key, fn]) => key === fn(data));
}
deactivate() {
this._active = false;
}
configured() {
return this._configured;
}
}
function makeSimpleConfigurator(cache) {
function cacheFn(val) {
if (typeof val === "boolean") {
if (val) cache.forever();else cache.never();
return;
}
return cache.using(() => assertSimpleType(val()));
}
cacheFn.forever = () => cache.forever();
cacheFn.never = () => cache.never();
cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
return cacheFn;
}
function assertSimpleType(value) {
if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
}
return value;
}

@ -0,0 +1,439 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.buildPresetChain = buildPresetChain;
exports.buildRootChain = buildRootChain;
exports.buildPresetChainWalker = void 0;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _debug() {
const data = _interopRequireDefault(require("debug"));
_debug = function () {
return data;
};
return data;
}
var _options = require("./validation/options");
var _patternToRegex = _interopRequireDefault(require("./pattern-to-regex"));
var _files = require("./files");
var _caching = require("./caching");
var _configDescriptors = require("./config-descriptors");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const debug = (0, _debug().default)("babel:config:config-chain");
function buildPresetChain(arg, context) {
const chain = buildPresetChainWalker(arg, context);
if (!chain) return null;
return {
plugins: dedupDescriptors(chain.plugins),
presets: dedupDescriptors(chain.presets),
options: chain.options.map(o => normalizeOptions(o))
};
}
const buildPresetChainWalker = makeChainWalker({
init: arg => arg,
root: preset => loadPresetDescriptors(preset),
env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName)
});
exports.buildPresetChainWalker = buildPresetChainWalker;
const loadPresetDescriptors = (0, _caching.makeWeakCache)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
const loadPresetEnvDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(index => (0, _caching.makeStrongCache)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));
function buildRootChain(opts, context) {
const programmaticChain = loadProgrammaticChain({
options: opts,
dirname: context.cwd
}, context);
if (!programmaticChain) return null;
let configFile;
if (typeof opts.configFile === "string") {
configFile = (0, _files.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
} else if (opts.configFile !== false) {
configFile = (0, _files.findRootConfig)(context.root, context.envName, context.caller);
}
let {
babelrc,
babelrcRoots
} = opts;
let babelrcRootsDirectory = context.cwd;
const configFileChain = emptyChain();
if (configFile) {
const validatedFile = validateConfigFile(configFile);
const result = loadFileChain(validatedFile, context);
if (!result) return null;
if (babelrc === undefined) {
babelrc = validatedFile.options.babelrc;
}
if (babelrcRoots === undefined) {
babelrcRootsDirectory = validatedFile.dirname;
babelrcRoots = validatedFile.options.babelrcRoots;
}
mergeChain(configFileChain, result);
}
const pkgData = typeof context.filename === "string" ? (0, _files.findPackageData)(context.filename) : null;
let ignoreFile, babelrcFile;
const fileChain = emptyChain();
if ((babelrc === true || babelrc === undefined) && pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
({
ignore: ignoreFile,
config: babelrcFile
} = (0, _files.findRelativeConfig)(pkgData, context.envName, context.caller));
if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
return null;
}
if (babelrcFile) {
const result = loadFileChain(validateBabelrcFile(babelrcFile), context);
if (!result) return null;
mergeChain(fileChain, result);
}
}
const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
return {
plugins: dedupDescriptors(chain.plugins),
presets: dedupDescriptors(chain.presets),
options: chain.options.map(o => normalizeOptions(o)),
ignore: ignoreFile || undefined,
babelrc: babelrcFile || undefined,
config: configFile || undefined
};
}
function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
if (typeof babelrcRoots === "boolean") return babelrcRoots;
const absoluteRoot = context.root;
if (babelrcRoots === undefined) {
return pkgData.directories.indexOf(absoluteRoot) !== -1;
}
let babelrcPatterns = babelrcRoots;
if (!Array.isArray(babelrcPatterns)) babelrcPatterns = [babelrcPatterns];
babelrcPatterns = babelrcPatterns.map(pat => {
return typeof pat === "string" ? _path().default.resolve(babelrcRootsDirectory, pat) : pat;
});
if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
return pkgData.directories.indexOf(absoluteRoot) !== -1;
}
return babelrcPatterns.some(pat => {
if (typeof pat === "string") {
pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
}
return pkgData.directories.some(directory => {
return matchPattern(pat, babelrcRootsDirectory, directory, context);
});
});
}
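// For reference, a sketch of the babelrcRoots shapes the function above accepts
// (boolean, string, RegExp, or an array of those); the monorepo layout is hypothetical.
// babel.config.js:
module.exports = {
  babelrcRoots: [
    ".",          // the project root itself
    "packages/*"  // resolved against babelrcRootsDirectory, then compiled via pattern-to-regex
  ]
};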
const validateConfigFile = (0, _caching.makeWeakCache)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("configfile", file.options)
}));
const validateBabelrcFile = (0, _caching.makeWeakCache)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("babelrcfile", file.options)
}));
const validateExtendFile = (0, _caching.makeWeakCache)(file => ({
filepath: file.filepath,
dirname: file.dirname,
options: (0, _options.validate)("extendsfile", file.options)
}));
const loadProgrammaticChain = makeChainWalker({
root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName)
});
const loadFileChain = makeChainWalker({
root: file => loadFileDescriptors(file),
env: (file, envName) => loadFileEnvDescriptors(file)(envName),
overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName)
});
const loadFileDescriptors = (0, _caching.makeWeakCache)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
const loadFileEnvDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
const loadFileOverridesDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(index => (0, _caching.makeStrongCache)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));
function buildRootDescriptors({
dirname,
options
}, alias, descriptors) {
return descriptors(dirname, options, alias);
}
function buildEnvDescriptors({
dirname,
options
}, alias, descriptors, envName) {
const opts = options.env && options.env[envName];
return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
}
function buildOverrideDescriptors({
dirname,
options
}, alias, descriptors, index) {
const opts = options.overrides && options.overrides[index];
if (!opts) throw new Error("Assertion failure - missing override");
return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
}
function buildOverrideEnvDescriptors({
dirname,
options
}, alias, descriptors, index, envName) {
const override = options.overrides && options.overrides[index];
if (!override) throw new Error("Assertion failure - missing override");
const opts = override.env && override.env[envName];
return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
}
function makeChainWalker({
root,
env,
overrides,
overridesEnv
}) {
return (input, context, files = new Set()) => {
const {
dirname
} = input;
const flattenedConfigs = [];
const rootOpts = root(input);
if (configIsApplicable(rootOpts, dirname, context)) {
flattenedConfigs.push(rootOpts);
const envOpts = env(input, context.envName);
if (envOpts && configIsApplicable(envOpts, dirname, context)) {
flattenedConfigs.push(envOpts);
}
(rootOpts.options.overrides || []).forEach((_, index) => {
const overrideOps = overrides(input, index);
if (configIsApplicable(overrideOps, dirname, context)) {
flattenedConfigs.push(overrideOps);
const overrideEnvOpts = overridesEnv(input, index, context.envName);
if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context)) {
flattenedConfigs.push(overrideEnvOpts);
}
}
});
}
if (flattenedConfigs.some(({
options: {
ignore,
only
}
}) => shouldIgnore(context, ignore, only, dirname))) {
return null;
}
const chain = emptyChain();
for (const op of flattenedConfigs) {
if (!mergeExtendsChain(chain, op.options, dirname, context, files)) {
return null;
}
mergeChainOpts(chain, op);
}
return chain;
};
}
function mergeExtendsChain(chain, opts, dirname, context, files) {
if (opts.extends === undefined) return true;
const file = (0, _files.loadConfig)(opts.extends, dirname, context.envName, context.caller);
if (files.has(file)) {
throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
}
files.add(file);
const fileChain = loadFileChain(validateExtendFile(file), context, files);
files.delete(file);
if (!fileChain) return false;
mergeChain(chain, fileChain);
return true;
}
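// A sketch of the "extends" option handled above: the path is resolved with
// resolve.sync relative to this file's dirname, validated as an "extendsfile",
// and its chain is merged before the extending file's own options. If
// ./shared.config.js pointed back at this file, the "Configuration cycle
// detected" error above would be thrown. File names here are placeholders.
// babel.config.js:
module.exports = {
  extends: "./shared.config.js",
  plugins: []
};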
function mergeChain(target, source) {
target.options.push(...source.options);
target.plugins.push(...source.plugins);
target.presets.push(...source.presets);
return target;
}
function mergeChainOpts(target, {
options,
plugins,
presets
}) {
target.options.push(options);
target.plugins.push(...plugins());
target.presets.push(...presets());
return target;
}
function emptyChain() {
return {
options: [],
presets: [],
plugins: []
};
}
function normalizeOptions(opts) {
const options = Object.assign({}, opts);
delete options.extends;
delete options.env;
delete options.overrides;
delete options.plugins;
delete options.presets;
delete options.passPerPreset;
delete options.ignore;
delete options.only;
delete options.test;
delete options.include;
delete options.exclude;
if (options.hasOwnProperty("sourceMap")) {
options.sourceMaps = options.sourceMap;
delete options.sourceMap;
}
return options;
}
function dedupDescriptors(items) {
const map = new Map();
const descriptors = [];
for (const item of items) {
if (typeof item.value === "function") {
const fnKey = item.value;
let nameMap = map.get(fnKey);
if (!nameMap) {
nameMap = new Map();
map.set(fnKey, nameMap);
}
let desc = nameMap.get(item.name);
if (!desc) {
desc = {
value: item
};
descriptors.push(desc);
if (!item.ownPass) nameMap.set(item.name, desc);
} else {
desc.value = item;
}
} else {
descriptors.push({
value: item
});
}
}
return descriptors.reduce((acc, desc) => {
acc.push(desc.value);
return acc;
}, []);
}
function configIsApplicable({
options
}, dirname, context) {
return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname));
}
function configFieldIsApplicable(context, test, dirname) {
const patterns = Array.isArray(test) ? test : [test];
return matchesPatterns(context, patterns, dirname);
}
function shouldIgnore(context, ignore, only, dirname) {
if (ignore && matchesPatterns(context, ignore, dirname)) {
debug("Ignored %o because it matched one of %O from %o", context.filename, ignore, dirname);
return true;
}
if (only && !matchesPatterns(context, only, dirname)) {
debug("Ignored %o because it failed to match one of %O from %o", context.filename, only, dirname);
return true;
}
return false;
}
function matchesPatterns(context, patterns, dirname) {
return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context));
}
function matchPattern(pattern, dirname, pathToTest, context) {
if (typeof pattern === "function") {
return !!pattern(pathToTest, {
dirname,
envName: context.envName,
caller: context.caller
});
}
if (typeof pathToTest !== "string") {
throw new Error(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`);
}
if (typeof pattern === "string") {
pattern = (0, _patternToRegex.default)(pattern, dirname);
}
return pattern.test(pathToTest);
}
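Pulling the chain walker, shouldIgnore, and matchPattern together, here is a sketch of a config whose chain this code would flatten in order: root options first, then the matching env block, then each applicable override and its env block. The paths are illustrative only:

// babel.config.js (sketch)
module.exports = {
  presets: ["@babel/preset-env"],
  env: {
    test: {
      // merged only when context.envName === "test"
      sourceMaps: "inline"
    }
  },
  overrides: [
    {
      // test/include take strings (compiled via pattern-to-regex relative to dirname),
      // RegExps, or functions of (filename, { dirname, envName, caller }).
      test: "./legacy",
      exclude: /\.min\.js$/,
      compact: true
    }
  ]
};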

@@ -0,0 +1,210 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createCachedDescriptors = createCachedDescriptors;
exports.createUncachedDescriptors = createUncachedDescriptors;
exports.createDescriptor = createDescriptor;
var _files = require("./files");
var _item = require("./item");
var _caching = require("./caching");
function isEqualDescriptor(a, b) {
return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && (a.file && a.file.request) === (b.file && b.file.request) && (a.file && a.file.resolved) === (b.file && b.file.resolved);
}
function createCachedDescriptors(dirname, options, alias) {
const {
plugins,
presets,
passPerPreset
} = options;
return {
options,
plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => [],
presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => []
};
}
function createUncachedDescriptors(dirname, options, alias) {
let plugins;
let presets;
return {
options,
plugins: () => {
if (!plugins) {
plugins = createPluginDescriptors(options.plugins || [], dirname, alias);
}
return plugins;
},
presets: () => {
if (!presets) {
presets = createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset);
}
return presets;
}
};
}
const PRESET_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPresetDescriptors = (0, _caching.makeWeakCache)((items, cache) => {
const dirname = cache.using(dir => dir);
return (0, _caching.makeStrongCache)(alias => (0, _caching.makeStrongCache)(passPerPreset => createPresetDescriptors(items, dirname, alias, passPerPreset).map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc))));
});
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPluginDescriptors = (0, _caching.makeWeakCache)((items, cache) => {
const dirname = cache.using(dir => dir);
return (0, _caching.makeStrongCache)(alias => createPluginDescriptors(items, dirname, alias).map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc)));
});
const DEFAULT_OPTIONS = {};
function loadCachedDescriptor(cache, desc) {
const {
value,
options = DEFAULT_OPTIONS
} = desc;
if (options === false) return desc;
let cacheByOptions = cache.get(value);
if (!cacheByOptions) {
cacheByOptions = new WeakMap();
cache.set(value, cacheByOptions);
}
let possibilities = cacheByOptions.get(options);
if (!possibilities) {
possibilities = [];
cacheByOptions.set(options, possibilities);
}
if (possibilities.indexOf(desc) === -1) {
const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));
if (matches.length > 0) {
return matches[0];
}
possibilities.push(desc);
}
return desc;
}
function createPresetDescriptors(items, dirname, alias, passPerPreset) {
return createDescriptors("preset", items, dirname, alias, passPerPreset);
}
function createPluginDescriptors(items, dirname, alias) {
return createDescriptors("plugin", items, dirname, alias);
}
function createDescriptors(type, items, dirname, alias, ownPass) {
const descriptors = items.map((item, index) => createDescriptor(item, dirname, {
type,
alias: `${alias}$${index}`,
ownPass: !!ownPass
}));
assertNoDuplicates(descriptors);
return descriptors;
}
function createDescriptor(pair, dirname, {
type,
alias,
ownPass
}) {
const desc = (0, _item.getItemDescriptor)(pair);
if (desc) {
return desc;
}
let name;
let options;
let value = pair;
if (Array.isArray(value)) {
if (value.length === 3) {
[value, options, name] = value;
} else {
[value, options] = value;
}
}
let file = undefined;
let filepath = null;
if (typeof value === "string") {
if (typeof type !== "string") {
throw new Error("To resolve a string-based item, the type of item must be given");
}
const resolver = type === "plugin" ? _files.loadPlugin : _files.loadPreset;
const request = value;
({
filepath,
value
} = resolver(value, dirname));
file = {
request,
resolved: filepath
};
}
if (!value) {
throw new Error(`Unexpected falsy value: ${String(value)}`);
}
if (typeof value === "object" && value.__esModule) {
if (value.default) {
value = value.default;
} else {
throw new Error("Must export a default export when using ES6 modules.");
}
}
if (typeof value !== "object" && typeof value !== "function") {
throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
}
if (filepath !== null && typeof value === "object" && value) {
throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
}
return {
name,
alias: filepath || alias,
value,
options,
dirname,
ownPass,
file
};
}
function assertNoDuplicates(items) {
const map = new Map();
for (const item of items) {
if (typeof item.value !== "function") continue;
let nameMap = map.get(item.value);
if (!nameMap) {
nameMap = new Set();
map.set(item.value, nameMap);
}
if (nameMap.has(item.name)) {
throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, ` plugins: [`, ` ['some-plugin', {}],`, ` ['some-plugin', {}, 'some unique name'],`, ` ]`].join("\n"));
}
nameMap.add(item.name);
}
}
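The duplicate-plugin error above already spells out the fix; for completeness, this is the three-element [value, options, name] tuple that createDescriptor destructures to give each instance its own name ("some-plugin" is a placeholder, not a real package):

// Sketch: two instances of one plugin need distinct names or assertNoDuplicates throws.
module.exports = {
  plugins: [
    ["some-plugin", { mode: "a" }],
    ["some-plugin", { mode: "b" }, "some-plugin-alt"] // [value, options, name]
  ]
};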

@@ -0,0 +1,323 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.findConfigUpwards = findConfigUpwards;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
function _debug() {
const data = _interopRequireDefault(require("debug"));
_debug = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _fs() {
const data = _interopRequireDefault(require("fs"));
_fs = function () {
return data;
};
return data;
}
function _json() {
const data = _interopRequireDefault(require("json5"));
_json = function () {
return data;
};
return data;
}
function _resolve() {
const data = _interopRequireDefault(require("resolve"));
_resolve = function () {
return data;
};
return data;
}
var _caching = require("../caching");
var _configApi = _interopRequireDefault(require("../helpers/config-api"));
var _utils = require("./utils");
var _patternToRegex = _interopRequireDefault(require("../pattern-to-regex"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const debug = (0, _debug().default)("babel:config:loading:files:configuration");
const BABEL_CONFIG_JS_FILENAME = "babel.config.js";
const BABELRC_FILENAME = ".babelrc";
const BABELRC_JS_FILENAME = ".babelrc.js";
const BABELIGNORE_FILENAME = ".babelignore";
function findConfigUpwards(rootDir) {
let dirname = rootDir;
while (true) {
if (_fs().default.existsSync(_path().default.join(dirname, BABEL_CONFIG_JS_FILENAME))) {
return dirname;
}
const nextDir = _path().default.dirname(dirname);
if (dirname === nextDir) break;
dirname = nextDir;
}
return null;
}
function findRelativeConfig(packageData, envName, caller) {
let config = null;
let ignore = null;
const dirname = _path().default.dirname(packageData.filepath);
for (const loc of packageData.directories) {
if (!config) {
config = [BABELRC_FILENAME, BABELRC_JS_FILENAME].reduce((previousConfig, name) => {
const filepath = _path().default.join(loc, name);
const config = readConfig(filepath, envName, caller);
if (config && previousConfig) {
throw new Error(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().default.basename(previousConfig.filepath)}\n` + ` - ${name}\n` + `from ${loc}`);
}
return config || previousConfig;
}, null);
const pkgConfig = packageData.pkg && packageData.pkg.dirname === loc ? packageToBabelConfig(packageData.pkg) : null;
if (pkgConfig) {
if (config) {
throw new Error(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().default.basename(pkgConfig.filepath)}#babel\n` + ` - ${_path().default.basename(config.filepath)}\n` + `from ${loc}`);
}
config = pkgConfig;
}
if (config) {
debug("Found configuration %o from %o.", config.filepath, dirname);
}
}
if (!ignore) {
const ignoreLoc = _path().default.join(loc, BABELIGNORE_FILENAME);
ignore = readIgnoreConfig(ignoreLoc);
if (ignore) {
debug("Found ignore %o from %o.", ignore.filepath, dirname);
}
}
}
return {
config,
ignore
};
}
function findRootConfig(dirname, envName, caller) {
const filepath = _path().default.resolve(dirname, BABEL_CONFIG_JS_FILENAME);
const conf = readConfig(filepath, envName, caller);
if (conf) {
debug("Found root config %o in $o.", BABEL_CONFIG_JS_FILENAME, dirname);
}
return conf;
}
function loadConfig(name, dirname, envName, caller) {
const filepath = _resolve().default.sync(name, {
basedir: dirname
});
const conf = readConfig(filepath, envName, caller);
if (!conf) {
throw new Error(`Config file ${filepath} contains no configuration data`);
}
debug("Loaded config %o from $o.", name, dirname);
return conf;
}
function readConfig(filepath, envName, caller) {
return _path().default.extname(filepath) === ".js" ? readConfigJS(filepath, {
envName,
caller
}) : readConfigJSON5(filepath);
}
const LOADING_CONFIGS = new Set();
const readConfigJS = (0, _caching.makeStrongCache)((filepath, cache) => {
if (!_fs().default.existsSync(filepath)) {
cache.forever();
return null;
}
if (LOADING_CONFIGS.has(filepath)) {
cache.never();
debug("Auto-ignoring usage of config %o.", filepath);
return {
filepath,
dirname: _path().default.dirname(filepath),
options: {}
};
}
let options;
try {
LOADING_CONFIGS.add(filepath);
const configModule = require(filepath);
options = configModule && configModule.__esModule ? configModule.default || undefined : configModule;
} catch (err) {
err.message = `${filepath}: Error while loading config - ${err.message}`;
throw err;
} finally {
LOADING_CONFIGS.delete(filepath);
}
if (typeof options === "function") {
options = options((0, _configApi.default)(cache));
if (!cache.configured()) throwConfigError();
}
if (!options || typeof options !== "object" || Array.isArray(options)) {
throw new Error(`${filepath}: Configuration should be an exported JavaScript object.`);
}
if (typeof options.then === "function") {
throw new Error(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`);
}
return {
filepath,
dirname: _path().default.dirname(filepath),
options
};
});
const packageToBabelConfig = (0, _caching.makeWeakCache)(file => {
const babel = file.options["babel"];
if (typeof babel === "undefined") return null;
if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
throw new Error(`${file.filepath}: .babel property must be an object`);
}
return {
filepath: file.filepath,
dirname: file.dirname,
options: babel
};
});
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
let options;
try {
options = _json().default.parse(content);
} catch (err) {
err.message = `${filepath}: Error while parsing config - ${err.message}`;
throw err;
}
if (!options) throw new Error(`${filepath}: No config detected`);
if (typeof options !== "object") {
throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
}
if (Array.isArray(options)) {
throw new Error(`${filepath}: Expected config object but found array`);
}
return {
filepath,
dirname: _path().default.dirname(filepath),
options
};
});
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
const ignoreDir = _path().default.dirname(filepath);
const ignorePatterns = content.split("\n").map(line => line.replace(/#(.*?)$/, "").trim()).filter(line => !!line);
for (const pattern of ignorePatterns) {
if (pattern[0] === "!") {
throw new Error(`Negation of file paths is not supported.`);
}
}
return {
filepath,
dirname: _path().default.dirname(filepath),
ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
};
});
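// A sketch of what readIgnoreConfig does with a .babelignore body: "#" comments
// are stripped, blank lines dropped, a leading "!" throws, and each surviving
// pattern is compiled via pattern-to-regex. The paths are placeholders.
const content = "# build output\ndist\nvendor/**/*.js\n";
const patterns = content
  .split("\n")
  .map(line => line.replace(/#(.*?)$/, "").trim())
  .filter(line => !!line); // ["dist", "vendor/**/*.js"]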
function throwConfigError() {
throw new Error(`\
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
for various types of caching, using the first param of their handler functions:
module.exports = function(api) {
// The API exposes the following:
// Cache the returned value forever and don't call this function again.
api.cache(true);
// Don't cache at all. Not recommended because it will be very slow.
api.cache(false);
// Cached based on the value of some function. If this function returns a value different from
// a previously-encountered value, the plugins will re-evaluate.
var env = api.cache(() => process.env.NODE_ENV);
// If testing for a specific env, we recommend testing for that value specifically, to avoid
// instantiating the plugin for every possible NODE_ENV value that might come up during execution.
var isProd = api.cache(() => process.env.NODE_ENV === "production");
// .cache(fn) will perform a linear search through instances to find the matching plugin, based
// on previously instantiated plugins. If you want to recreate the plugin and discard the
// previous instance whenever something changes, you may use:
var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");
// Note, we also expose the following more-verbose versions of the above examples:
api.cache.forever(); // api.cache(true)
api.cache.never(); // api.cache(false)
api.cache.using(fn); // api.cache(fn)
// Return the value that will be cached.
return { };
};`);
}

@@ -0,0 +1,59 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.findConfigUpwards = findConfigUpwards;
exports.findPackageData = findPackageData;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
exports.resolvePlugin = resolvePlugin;
exports.resolvePreset = resolvePreset;
exports.loadPlugin = loadPlugin;
exports.loadPreset = loadPreset;
function findConfigUpwards(rootDir) {
return null;
}
function findPackageData(filepath) {
return {
filepath,
directories: [],
pkg: null,
isPackage: false
};
}
function findRelativeConfig(pkgData, envName, caller) {
return {
pkg: null,
config: null,
ignore: null
};
}
function findRootConfig(dirname, envName, caller) {
return null;
}
function loadConfig(name, dirname, envName, caller) {
throw new Error(`Cannot load ${name} relative to ${dirname} in a browser`);
}
function resolvePlugin(name, dirname) {
return null;
}
function resolvePreset(name, dirname) {
return null;
}
function loadPlugin(name, dirname) {
throw new Error(`Cannot load plugin ${name} relative to ${dirname} in a browser`);
}
function loadPreset(name, dirname) {
throw new Error(`Cannot load preset ${name} relative to ${dirname} in a browser`);
}
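Because these browser stubs throw for any string-based lookup, plugins and presets have to reach a browser build as already-loaded functions or objects. A hedged sketch, assuming @babel/core and @babel/preset-env are bundled ahead of time rather than resolved at runtime:

// Sketch for a bundled browser build; transformSync and @babel/preset-env are
// real packages, but the bundler setup is assumed and not shown here.
import { transformSync } from "@babel/core";
import presetEnv from "@babel/preset-env";

const result = transformSync("const id = (x) => x;", {
  presets: [presetEnv], // pass the implementation, never the string "@babel/preset-env"
  babelrc: false,       // skip findRelativeConfig(), which returns nothing here anyway
  configFile: false
});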
