parent 27fa2329f9
commit bd2d2f8b38
@@ -1,6 +0,0 @@
projectKey=clawer
serverUrl=http://localhost:9000
serverVersion=7.8.0.26217
dashboardUrl=http://localhost:9000/dashboard?id=clawer
ceTaskId=AZMv5JVBnAUFl5pPDUTm
ceTaskUrl=http://localhost:9000/api/ce/task?id=AZMv5JVBnAUFl5pPDUTm
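The `ceTaskUrl` above points at SonarQube's Compute Engine task endpoint, which can be polled to find out whether the background analysis of the `clawer` project has finished. A minimal sketch, assuming a local SonarQube 7.8 server with anonymous read access (authentication and the exact response fields may differ per installation):

```python
import time
import requests

CE_TASK_URL = "http://localhost:9000/api/ce/task?id=AZMv5JVBnAUFl5pPDUTm"

def wait_for_analysis(url: str, timeout: int = 300) -> str:
    """Poll the Compute Engine task until it leaves the queue (assumes anonymous read access)."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        status = requests.get(url, timeout=10).json()["task"]["status"]
        if status not in ("PENDING", "IN_PROGRESS"):
            return status  # e.g. SUCCESS, FAILED, CANCELED
        time.sleep(2)
    raise TimeoutError("SonarQube analysis did not finish in time")

if __name__ == "__main__":
    print(wait_for_analysis(CE_TASK_URL))
```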
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2024 Suysker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,90 +0,0 @@
import pandas as pd
import mysql.connector
from mysql.connector import Error
import os
from datetime import datetime, timedelta

# Database connection settings (host and port are specified separately)
db_config = {
    'host': '152.136.166.253',
    'port': 8989,
    'database': 'fly_ticket',
    'user': 'root',
    'password': 'Cauc@2024'
}


def import_csv_to_db(file_path, cursor):
    """Read one crawled CSV and upsert every row into the flight table."""
    df = pd.read_csv(file_path)
    for index, row in df.iterrows():
        sql = """INSERT INTO flight (f_n, f_s_p, f_a_p, f_s_a, f_a_a, f_s_t, f_a_t, f_Date, f_Delay, f_p, f_food, f_wide, f_depcode, f_dstcode)
                 VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                 ON DUPLICATE KEY UPDATE
                     f_s_p = VALUES(f_s_p),
                     f_a_p = VALUES(f_a_p),
                     f_s_a = VALUES(f_s_a),
                     f_a_a = VALUES(f_a_a),
                     f_s_t = VALUES(f_s_t),
                     f_a_t = VALUES(f_a_t),
                     f_Delay = VALUES(f_Delay),
                     f_p = VALUES(f_p),
                     f_food = VALUES(f_food),
                     f_wide = VALUES(f_wide),
                     f_depcode = VALUES(f_depcode),
                     f_dstcode = VALUES(f_dstcode);"""

        values = (
            row['航班号'],          # flight number
            row['出发城市'],        # departure city
            row['到达城市'],        # arrival city
            row['出发机场'],        # departure airport
            row['到达机场'],        # arrival airport
            row['出发时间'],        # departure time
            row['到达时间'],        # arrival time
            row['出发日期'],        # departure date
            row['出发延误时间'],    # departure delay
            row['economy_origin'],
            row['经济舱餐食信息'],  # economy-class meal info
            row['经济舱座椅间距'],  # economy-class seat pitch
            row['出发机场三字码'],  # departure airport IATA code
            row['到达机场三字码']   # arrival airport IATA code
        )

        cursor.execute(sql, values)


try:
    # Connect to the database
    conn = mysql.connector.connect(**db_config)

    if conn.is_connected():
        cursor = conn.cursor()

        # Date range of crawled folders to import
        start_date = datetime(2024, 11, 12)
        end_date = datetime(2024, 11, 20)
        current_date = start_date

        while current_date <= end_date:
            folder_name = current_date.strftime("%Y-%m-%d")
            folder_path = os.path.join(r"D:\college\SE2\Ctrip-Crawler-main\Ctrip-Crawler-withComfortInfo",
                                       folder_name, "2024-11-12")

            if os.path.exists(folder_path):
                for file_name in os.listdir(folder_path):
                    if file_name.endswith('.csv'):
                        file_path = os.path.join(folder_path, file_name)
                        import_csv_to_db(file_path, cursor)
                        print(f"已导入文件: {file_path}")

            current_date += timedelta(days=1)

        # Commit all inserts
        conn.commit()
        print("所有数据成功插入到数据库")

except Error as e:
    print(f"连接数据库时出错: {e}")

finally:
    if 'conn' in locals() and conn.is_connected():
        cursor.close()
        conn.close()
        print("数据库连接已关闭")
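The connection settings above embed the MySQL host and password directly in the script. A minimal sketch of reading the same settings from environment variables instead, so credentials stay out of version control; the variable names are illustrative and not part of the original project:

```python
import os

# Hypothetical environment variable names; defaults mirror the original hard-coded
# values except for the password, which must now be supplied externally.
db_config = {
    'host': os.environ.get('FLY_TICKET_DB_HOST', '152.136.166.253'),
    'port': int(os.environ.get('FLY_TICKET_DB_PORT', '8989')),
    'database': os.environ.get('FLY_TICKET_DB_NAME', 'fly_ticket'),
    'user': os.environ.get('FLY_TICKET_DB_USER', 'root'),
    'password': os.environ['FLY_TICKET_DB_PASSWORD'],  # fail fast if unset
}
```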
@@ -1,412 +0,0 @@
import io
import os
import gzip
import time
import json
import random
import requests
import threading
import pandas as pd
from seleniumwire import webdriver
from datetime import datetime as dt, timedelta
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException, StaleElementReferenceException, ElementNotInteractableException, ElementClickInterceptedException  # exception types used below


def getcitycode():
    cityname, code = [], []
    # Use Ctrip's own API endpoint for city/airport codes
    city_url = 'https://flights.ctrip.com/online/api/poi/get?v=' + str(random.random())
    headers = {
        'dnt': '1',
        'referer': 'https://verify.ctrip.com/',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36'
    }
    r = requests.get(city_url, headers=headers)
    citys = json.loads(r.text).get('data')
    for city in citys:
        if city == '热门':  # skip the "popular cities" shortcut group
            continue
        for key in city:
            try:
                for k in citys[city][key]:
                    cityname.append(k['display'])
                    code.append(k['data'])
            except:
                continue
    citycode = dict(zip(cityname, code))

    return cityname, citycode


class FLIGHT(object):
    def __init__(self):
        self.url = 'https://flights.ctrip.com/online/list/oneway'  # Ctrip one-way search page
        self.chromeDriverPath = 'C:/Program Files/Google/Chrome/Application/chromedriver'  # chromedriver location
        self.options = webdriver.ChromeOptions()  # browser configuration object
        # self.options.add_argument('--incognito')  # incognito mode
        # self.options.add_argument('User-Agent=%s' % UserAgent().random)  # replace the User-Agent
        self.options.add_argument("--disable-blink-features")
        self.options.add_argument("--disable-blink-features=AutomationControlled")
        self.options.add_experimental_option("excludeSwitches", ['enable-automation'])  # hide the "controlled by automation" banner
        self.driver = webdriver.Chrome(executable_path=self.chromeDriverPath, chrome_options=self.options)
        self.driver.maximize_window()
        self.err = 0  # retry counter

    def getpage(self):
        # Look up the three-letter airport codes
        self.startcode = self.citycode[self.city[0]][-3:]
        self.endcode = self.citycode[self.city[1]][-3:]

        # Build the search URL
        flights_url = self.url + '-' + self.startcode + '-' + self.endcode + '?&depdate=' + self.date
        print(flights_url)
        # Page-load timeout
        self.driver.set_page_load_timeout(300)
        try:
            self.driver.get(flights_url)
        except:
            print('页面连接失败')
            self.driver.close()
            self.getpage()
        else:
            try:
                # Check whether a captcha / verification page is shown
                self.driver.find_element(By.CLASS_NAME, "basic-alert.alert-giftinfo")
                print('等待2小时后重试')
                time.sleep(7200)
                self.getpage()
            except:
                # No captcha, continue
                self.remove_btn()

    def remove_btn(self):
        try:
            js_remove = "$('.notice-box').remove();"
            self.driver.execute_script(js_remove)
        except Exception as e:
            print('防疫移除失败', e)
        else:
            self.changecity()

    def changecity(self):
        try:
            # Locate the departure and arrival input boxes
            its = self.driver.find_elements(By.CLASS_NAME, 'form-input-v3')

            # If the departure box does not match the target city, retype it
            while self.city[0] not in its[0].get_attribute('value'):
                its[0].click()
                time.sleep(0.5)
                its[0].send_keys(Keys.CONTROL + 'a')
                time.sleep(0.5)
                its[0].send_keys(self.city[0])

            time.sleep(0.5)

            # If the arrival box does not match the target city, retype it
            while self.city[1] not in its[1].get_attribute('value'):
                its[1].click()
                time.sleep(0.5)
                its[1].send_keys(Keys.CONTROL + 'a')
                time.sleep(0.5)
                its[1].send_keys(self.city[1])

            time.sleep(0.5)
            try:
                # Click the low-price-reminder button to trigger the new search
                self.driver.implicitly_wait(5)  # seconds
                self.driver.find_elements(By.CLASS_NAME, 'low-price-remind')[0].click()
            except IndexError as e:
                print('\n更换城市错误 找不到元素', e)
                # Fall back to pressing Enter
                its[1].send_keys(Keys.ENTER)

            print('\n更换城市成功', self.city[0] + '-' + self.city[1])
        except (ElementNotInteractableException, StaleElementReferenceException, ElementClickInterceptedException) as e:
            print('\n更换城市错误 元素错误', e)
            self.err += 1
            if self.err <= 5:
                self.changecity()  # retry (the original called a non-existent self.click_btn())
            else:
                self.err = 0
                del self.driver.requests
                self.getpage()
        except Exception as e:
            print('\n更换城市错误', e)
            # Drop the captured requests for this attempt
            del self.driver.requests
            # Start over from the search page
            self.getpage()
        else:
            # No error: continue
            self.err = 0
            self.getdata()

    def getdata(self):
        try:
            # Wait for the batchSearch response to be captured
            self.predata = self.driver.wait_for_request('/international/search/api/search/batchSearch?.*', timeout=60)

            rb = dict(json.loads(self.predata.body).get('flightSegments')[0])

        except TimeoutException as e:
            print('\n获取数据错误', e)
            # Drop the captured requests for this attempt
            del self.driver.requests
            # Start over from the search page
            self.getpage()
        else:
            # Verify that the response matches the requested city pair
            if rb['departureCityName'] == self.city[0] and rb['arrivalCityName'] == self.city[1]:
                print('城市获取正确')
                del self.driver.requests
                self.decode_data()
            else:
                del self.driver.requests
                # Re-enter the cities and search again
                self.changecity()

    def decode_data(self):
        try:
            buf = io.BytesIO(self.predata.response.body)
            gf = gzip.GzipFile(fileobj=buf)
            self.dedata = gf.read().decode('UTF-8')
            self.dedata = json.loads(self.dedata)
        except:
            print('重新获取数据')
            self.getpage()
        else:
            self.check_data()

    def check_data(self):
        try:
            self.flightItineraryList = self.dedata['data']['flightItineraryList']
            # Iterate in reverse and drop itineraries with transfers
            for i in range(len(self.flightItineraryList) - 1, -1, -1):
                if self.flightItineraryList[i]['flightSegments'][0]['transferCount'] != 0:
                    self.flightItineraryList.pop(i)
            if len(self.flightItineraryList):
                # Direct flights exist, continue
                self.muti_process()
            else:
                print('不存在直航航班')
                return 0
        except:
            print('不存在直航航班')
            return 0

    def muti_process(self):
        processes = []

        self.flights = pd.DataFrame()
        self.prices = pd.DataFrame()
        # Parse flight segments
        processes.append(threading.Thread(target=self.proc_flightSegments))
        # Parse price lists
        processes.append(threading.Thread(target=self.proc_priceList))

        for pro in processes:
            pro.start()
        for pro in processes:
            pro.join()

        # Merge both tables into the final frame
        self.mergedata()

    def proc_flightSegments(self):
        for flightlist in self.flightItineraryList:
            flightlist = flightlist['flightSegments'][0]['flightList']
            flightUnitList = dict(flightlist[0])

            departureday = flightUnitList['departureDateTime'].split(' ')[0]
            departuretime = flightUnitList['departureDateTime'].split(' ')[1]

            arrivalday = flightUnitList['arrivalDateTime'].split(' ')[0]
            arrivaltime = flightUnitList['arrivalDateTime'].split(' ')[1]

            # Drop fields that are not needed
            dellist = ['sequenceNo', 'marketAirlineCode',
                       'departureProvinceId', 'departureCityId', 'departureCityCode', 'departureAirportShortName', 'departureTerminal',
                       'arrivalProvinceId', 'arrivalCityId', 'arrivalCityCode', 'arrivalAirportShortName', 'arrivalTerminal',
                       'transferDuration', 'stopList', 'leakedVisaTagSwitch', 'trafficType', 'highLightPlaneNo', 'mealType',
                       'operateAirlineCode', 'arrivalDateTime', 'departureDateTime', 'operateFlightNo', 'operateAirlineName']
            for value in dellist:
                try:
                    flightUnitList.pop(value)
                except:
                    continue

            # Store the split date/time fields
            flightUnitList.update({'departureday': departureday, 'departuretime': departuretime,
                                   'arrivalday': arrivalday, 'arrivaltime': arrivaltime})

            self.flights = pd.concat([self.flights, pd.DataFrame(flightUnitList, index=[0])], ignore_index=True)

    def proc_priceList(self):
        for flightlist in self.flightItineraryList:
            flightNo = flightlist['itineraryId'].split('_')[0]
            priceList = flightlist['priceList']

            # Economy fares and their discount rates
            economy, economy_discount = [], []
            # Business fares and their discount rates
            bussiness, bussiness_discount = [], []

            for price in priceList:
                adultPrice = price['adultPrice']
                cabin = price['cabin']
                priceUnitList = dict(price['priceUnitList'][0]['flightSeatList'][0])
                discountRate = priceUnitList['discountRate']
                # Economy class
                if cabin == 'Y':
                    economy.append(adultPrice)
                    economy_discount.append(discountRate)
                # Business class
                elif cabin == 'C':
                    bussiness.append(adultPrice)
                    bussiness_discount.append(discountRate)

            if economy != []:
                try:
                    economy_origin = economy[economy_discount.index(1)]
                except:
                    economy_origin = int(max(economy) / max(economy_discount))

                if min(economy_discount) != 1:
                    economy_low = min(economy)
                    economy_cut = min(economy_discount)
                else:
                    economy_low = ''
                    economy_cut = ''

            else:
                economy_origin = ''
                economy_low = ''
                economy_cut = ''

            if bussiness != []:
                try:
                    bussiness_origin = bussiness[bussiness_discount.index(1)]
                except:
                    bussiness_origin = int(max(bussiness) / max(bussiness_discount))

                if min(bussiness_discount) != 1:
                    bussiness_low = min(bussiness)
                    bussiness_cut = min(bussiness_discount)
                else:
                    bussiness_low = ''
                    bussiness_cut = ''

            else:
                bussiness_origin = ''
                bussiness_low = ''
                bussiness_cut = ''

            price_info = {'flightNo': flightNo,
                          'economy_origin': economy_origin, 'economy_low': economy_low, 'economy_cut': economy_cut,
                          'bussiness_origin': bussiness_origin, 'bussiness_low': bussiness_low, 'bussiness_cut': bussiness_cut}

            # self.prices = self.prices.append(price_info, ignore_index=True)
            self.prices = pd.concat([self.prices, pd.DataFrame(price_info, index=[0])], ignore_index=True)

    def mergedata(self):
        try:
            self.df = self.flights.merge(self.prices, on=['flightNo'])

            self.df['数据获取日期'] = dt.now().strftime('%Y-%m-%d')

            # Rename the DataFrame columns to the Chinese headers
            order = ['数据获取日期', '航班号', '航空公司',
                     '出发日期', '出发时间', '到达日期', '到达时间', '飞行时长', '出发国家', '出发城市', '出发机场', '出发机场三字码',
                     '到达国家', '到达城市', '到达机场', '到达机场三字码', '飞机型号', '飞机尺寸', '飞机型号三字码',
                     '经济舱原价', '经济舱最低价', '经济舱折扣', '商务舱原价', '商务舱最低价', '商务舱折扣',
                     '到达准点率', '停留次数']

            origin = ['数据获取日期', 'flightNo', 'marketAirlineName',
                      'departureday', 'departuretime', 'arrivalday', 'arrivaltime', 'duration',
                      'departureCountryName', 'departureCityName', 'departureAirportName', 'departureAirportCode',
                      'arrivalCountryName', 'arrivalCityName', 'arrivalAirportName', 'arrivalAirportCode',
                      'aircraftName', 'aircraftSize', 'aircraftCode',
                      'economy_origin', 'economy_low', 'economy_cut',
                      'bussiness_origin', 'bussiness_low', 'bussiness_cut',
                      'arrivalPunctuality', 'stopCount']

            columns = dict(zip(origin, order))

            self.df = self.df.rename(columns=columns)

            self.df = self.df[order]

            if not os.path.exists(self.date):
                os.makedirs(self.date)

            filename = os.getcwd() + '\\' + self.date + '\\' + self.date + '-' + self.city[0] + '-' + self.city[1] + '.csv'

            self.df.to_csv(filename, encoding='GB18030', index=False)

            print('\n数据爬取完成', filename)
        except Exception as e:
            print('合并数据失败', e)

    def demain(self, citys, citycode):
        self.citycode = citycode
        # Departure date: seven days from today
        self.date = dt.now() + timedelta(days=7)
        self.date = self.date.strftime('%Y-%m-%d')

        for city in citys:
            self.city = city

            if citys.index(city) == 0:
                # First pair: open the search page
                self.getpage()
            else:
                # Later pairs: only swap departure and destination
                self.changecity()

        # All pairs done, quit the browser
        self.driver.quit()


if __name__ == '__main__':
    citys = []
    cityname, citycode = getcitycode()
    city = ['上海', '广州', '深圳', '北京']
    ytic = list(reversed(city))
    for m in city:
        for n in ytic:
            if m == n:
                continue
            else:
                citys.append([m, n])
    fly = FLIGHT()
    fly.demain(citys, citycode)
    print('\n程序运行完成!!!!')
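The constructor above uses the old `executable_path`/`chrome_options` keyword arguments, which Selenium 4 removed. A minimal sketch of the equivalent setup with the Selenium 4 API, assuming a selenium-wire/Selenium 4 combination that accepts `service=` and `options=`:

```python
from seleniumwire import webdriver
from selenium.webdriver.chrome.service import Service

chrome_driver_path = 'C:/Program Files/Google/Chrome/Application/chromedriver'

options = webdriver.ChromeOptions()
options.add_argument("--disable-blink-features=AutomationControlled")
options.add_experimental_option("excludeSwitches", ['enable-automation'])

# Selenium 4 style: the driver binary goes through a Service object,
# and ChromeOptions are passed as `options` instead of `chrome_options`.
driver = webdriver.Chrome(service=Service(chrome_driver_path), options=options)
```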
@@ -1,397 +0,0 @@
import io
import os
import gzip
import time
import json
import threading
import pandas as pd
from seleniumwire import webdriver
from datetime import datetime as dt, timedelta
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException, StaleElementReferenceException, ElementNotInteractableException, ElementClickInterceptedException  # exception types used below


class FLIGHT(object):
    def __init__(self):
        self.chromeDriverPath = 'C:/Program Files/Google/Chrome/Application/chromedriver'  # chromedriver location
        self.options = webdriver.ChromeOptions()  # browser configuration object
        self.options.add_argument('--incognito')  # incognito mode
        self.options.add_argument("--disable-blink-features")
        self.options.add_argument("--disable-blink-features=AutomationControlled")
        self.options.add_experimental_option("excludeSwitches", ['enable-automation'])  # hide the "controlled by automation" banner
        self.driver = webdriver.Chrome(executable_path=self.chromeDriverPath, chrome_options=self.options)
        self.driver.set_page_load_timeout(300)  # page-load timeout
        self.driver.maximize_window()
        self.err = 0  # retry counter
        # Open the flight search home page
        self.driver.get('https://flights.ctrip.com/online/channel/domestic')

    def getpage(self):
        try:
            self.driver.find_element(By.CLASS_NAME, 'pc_home-jipiao').click()  # click the flight icon to return to the main page
            self.driver.implicitly_wait(5)  # seconds
            self.driver.find_elements(By.CLASS_NAME, 'radio-label')[0].click()  # one-way

            while self.driver.find_elements(By.CSS_SELECTOR, "[aria-label=请选择日期]")[0].get_attribute("value") != self.date:

                self.driver.find_element(By.CLASS_NAME, 'modifyDate.depart-date').click()  # open the date picker

                for m in self.driver.find_elements(By.CLASS_NAME, 'date-picker.date-picker-block'):

                    if int(m.find_element(By.CLASS_NAME, 'month').text[:-1]) != int(self.date[5:7]):
                        continue

                    for d in m.find_elements(By.CLASS_NAME, 'date-d'):
                        if int(d.text) == int(self.date[-2:]):
                            d.click()
                            break

            self.driver.find_element(By.CLASS_NAME, 'search-btn').click()  # search

        except:
            print('页面连接失败')
            self.driver.close()
            self.getpage()
        else:
            try:
                # Check whether a captcha / verification page is shown
                self.driver.find_element(By.ID, "verification-code")
                print('等待2小时后重试')
                time.sleep(7200)
                self.getpage()
            except:
                # No captcha, continue
                self.changecity()

    def remove_btn(self):
        try:
            js_remove = "$('.notice-box').remove();"
            self.driver.execute_script(js_remove)
        except Exception as e:
            print('防疫移除失败', e)

    def changecity(self):

        # Remove the epidemic-notice overlay first
        self.remove_btn()

        try:
            # Locate the departure and arrival input boxes
            its = self.driver.find_elements(By.CLASS_NAME, 'form-input-v3')

            # If the departure box does not match the target city, retype it
            while self.city[0] not in its[0].get_attribute('value'):
                its[0].click()
                time.sleep(0.5)
                its[0].send_keys(Keys.CONTROL + 'a')
                time.sleep(0.5)
                its[0].send_keys(self.city[0])

            time.sleep(0.5)

            # If the arrival box does not match the target city, retype it
            while self.city[1] not in its[1].get_attribute('value'):
                its[1].click()
                time.sleep(0.5)
                its[1].send_keys(Keys.CONTROL + 'a')
                time.sleep(0.5)
                its[1].send_keys(self.city[1])

            time.sleep(0.5)
            try:
                # Click the low-price-reminder button to trigger the new search
                self.driver.implicitly_wait(5)  # seconds
                self.driver.find_elements(By.CLASS_NAME, 'low-price-remind')[0].click()
            except IndexError as e:
                print('\n更换城市错误 找不到元素', e)
                # Fall back to pressing Enter
                its[1].send_keys(Keys.ENTER)

            print('\n更换城市成功', self.city[0] + '-' + self.city[1])
        # Element-level errors: retry a few times, then restart from the search page
        except (IndexError, ElementNotInteractableException, StaleElementReferenceException, ElementClickInterceptedException) as e:
            print('\n更换城市错误 元素错误', e)
            self.err += 1
            if self.err <= 5:
                self.changecity()
            else:
                self.err = 0
                del self.driver.requests
                self.getpage()
        except Exception as e:
            print('\n更换城市错误', e)
            # Drop the captured requests for this attempt
            del self.driver.requests
            # Start over from the search page
            self.getpage()
        else:
            # No error: continue
            self.err = 0
            self.getdata()

    def getdata(self):
        try:
            # Wait for the batchSearch response to be captured
            self.predata = self.driver.wait_for_request('/international/search/api/search/batchSearch?.*', timeout=30)

            rb = dict(json.loads(self.predata.body).get('flightSegments')[0])

        except TimeoutException as e:
            print('\n获取数据错误', e)
            # Drop the captured requests for this attempt
            del self.driver.requests
            # Start over from the search page
            self.getpage()
        else:
            # Verify that the response matches the requested city pair
            if rb['departureCityName'] == self.city[0] and rb['arrivalCityName'] == self.city[1]:
                print('城市获取正确')
                del self.driver.requests
                self.decode_data()
            else:
                del self.driver.requests
                # Re-enter the cities and search again
                self.changecity()

    def decode_data(self):
        try:
            buf = io.BytesIO(self.predata.response.body)
            gf = gzip.GzipFile(fileobj=buf)
            self.dedata = gf.read().decode('UTF-8')
            self.dedata = json.loads(self.dedata)
        except:
            print('重新获取数据')
            self.getpage()
        else:
            self.check_data()

    def check_data(self):
        try:
            self.flightItineraryList = self.dedata['data']['flightItineraryList']
            # Iterate in reverse and drop itineraries with transfers
            for i in range(len(self.flightItineraryList) - 1, -1, -1):
                if self.flightItineraryList[i]['flightSegments'][0]['transferCount'] != 0:
                    self.flightItineraryList.pop(i)
            if len(self.flightItineraryList):
                # Direct flights exist, continue
                self.muti_process()
            else:
                print('不存在直航航班')
                return 0
        except:
            print('不存在直航航班')
            return 0

    def muti_process(self):
        processes = []

        self.flights = pd.DataFrame()
        self.prices = pd.DataFrame()
        # Parse flight segments
        processes.append(threading.Thread(target=self.proc_flightSegments))
        # Parse price lists
        processes.append(threading.Thread(target=self.proc_priceList))

        for pro in processes:
            pro.start()
        for pro in processes:
            pro.join()

        # Merge both tables into the final frame
        self.mergedata()

    def proc_flightSegments(self):
        for flightlist in self.flightItineraryList:
            flightlist = flightlist['flightSegments'][0]['flightList']
            flightUnitList = dict(flightlist[0])

            departureday = flightUnitList['departureDateTime'].split(' ')[0]
            departuretime = flightUnitList['departureDateTime'].split(' ')[1]

            arrivalday = flightUnitList['arrivalDateTime'].split(' ')[0]
            arrivaltime = flightUnitList['arrivalDateTime'].split(' ')[1]

            # Drop fields that are not needed
            dellist = ['sequenceNo', 'marketAirlineCode',
                       'departureProvinceId', 'departureCityId', 'departureCityCode', 'departureAirportShortName', 'departureTerminal',
                       'arrivalProvinceId', 'arrivalCityId', 'arrivalCityCode', 'arrivalAirportShortName', 'arrivalTerminal',
                       'transferDuration', 'stopList', 'leakedVisaTagSwitch', 'trafficType', 'highLightPlaneNo', 'mealType',
                       'operateAirlineCode', 'arrivalDateTime', 'departureDateTime', 'operateFlightNo', 'operateAirlineName']
            for value in dellist:
                try:
                    flightUnitList.pop(value)
                except:
                    continue

            # Store the split date/time fields
            flightUnitList.update({'departureday': departureday, 'departuretime': departuretime,
                                   'arrivalday': arrivalday, 'arrivaltime': arrivaltime})

            self.flights = pd.concat([self.flights, pd.DataFrame(flightUnitList, index=[0])], ignore_index=True)

    def proc_priceList(self):
        for flightlist in self.flightItineraryList:
            flightNo = flightlist['itineraryId'].split('_')[0]
            priceList = flightlist['priceList']

            # Economy fares and their discount rates
            economy, economy_discount = [], []
            # Business fares and their discount rates
            bussiness, bussiness_discount = [], []

            for price in priceList:
                adultPrice = price['adultPrice']
                cabin = price['cabin']
                priceUnitList = dict(price['priceUnitList'][0]['flightSeatList'][0])
                discountRate = priceUnitList['discountRate']
                # Economy class
                if cabin == 'Y':
                    economy.append(adultPrice)
                    economy_discount.append(discountRate)
                # Business class
                elif cabin == 'C':
                    bussiness.append(adultPrice)
                    bussiness_discount.append(discountRate)

            if economy != []:
                try:
                    economy_origin = economy[economy_discount.index(1)]
                except:
                    economy_origin = int(max(economy) / max(economy_discount))

                if min(economy_discount) != 1:
                    economy_low = min(economy)
                    economy_cut = min(economy_discount)
                else:
                    economy_low = ''
                    economy_cut = ''

            else:
                economy_origin = ''
                economy_low = ''
                economy_cut = ''

            if bussiness != []:
                try:
                    bussiness_origin = bussiness[bussiness_discount.index(1)]
                except:
                    bussiness_origin = int(max(bussiness) / max(bussiness_discount))

                if min(bussiness_discount) != 1:
                    bussiness_low = min(bussiness)
                    bussiness_cut = min(bussiness_discount)
                else:
                    bussiness_low = ''
                    bussiness_cut = ''

            else:
                bussiness_origin = ''
                bussiness_low = ''
                bussiness_cut = ''

            price_info = {'flightNo': flightNo,
                          'economy_origin': economy_origin, 'economy_low': economy_low, 'economy_cut': economy_cut,
                          'bussiness_origin': bussiness_origin, 'bussiness_low': bussiness_low, 'bussiness_cut': bussiness_cut}

            # self.prices = self.prices.append(price_info, ignore_index=True)
            self.prices = pd.concat([self.prices, pd.DataFrame(price_info, index=[0])], ignore_index=True)

    def mergedata(self):
        try:
            self.df = self.flights.merge(self.prices, on=['flightNo'])

            self.df['数据获取日期'] = dt.now().strftime('%Y-%m-%d')

            # Rename the DataFrame columns to the Chinese headers
            order = ['数据获取日期', '航班号', '航空公司',
                     '出发日期', '出发时间', '到达日期', '到达时间', '飞行时长', '出发国家', '出发城市', '出发机场', '出发机场三字码',
                     '到达国家', '到达城市', '到达机场', '到达机场三字码', '飞机型号', '飞机尺寸', '飞机型号三字码',
                     '经济舱原价', '经济舱最低价', '经济舱折扣', '商务舱原价', '商务舱最低价', '商务舱折扣',
                     '到达准点率', '停留次数']

            origin = ['数据获取日期', 'flightNo', 'marketAirlineName',
                      'departureday', 'departuretime', 'arrivalday', 'arrivaltime', 'duration',
                      'departureCountryName', 'departureCityName', 'departureAirportName', 'departureAirportCode',
                      'arrivalCountryName', 'arrivalCityName', 'arrivalAirportName', 'arrivalAirportCode',
                      'aircraftName', 'aircraftSize', 'aircraftCode',
                      'economy_origin', 'economy_low', 'economy_cut',
                      'bussiness_origin', 'bussiness_low', 'bussiness_cut',
                      'arrivalPunctuality', 'stopCount']

            columns = dict(zip(origin, order))

            self.df = self.df.rename(columns=columns)

            self.df = self.df[order]

            if not os.path.exists(self.date):
                os.makedirs(self.date)

            filename = os.getcwd() + '\\' + self.date + '\\' + self.date + '-' + self.city[0] + '-' + self.city[1] + '.csv'

            self.df.to_csv(filename, encoding='GB18030', index=False)

            print('\n数据爬取完成', filename)
        except Exception as e:
            print('合并数据失败', e)

    def demain(self, citys):
        # Departure date: tomorrow
        self.date = dt.now() + timedelta(days=1)
        self.date = self.date.strftime('%Y-%m-%d')

        for city in citys:
            self.city = city

            if citys.index(city) == 0:
                # First pair: drive the search form on the home page
                self.getpage()
            else:
                # Later pairs: only swap departure and destination
                self.changecity()

        # All pairs done, quit the browser
        self.driver.quit()


if __name__ == '__main__':
    citys = []
    city = ['上海', '广州', '深圳', '北京']
    # Build every ordered city pair
    ytic = list(reversed(city))
    for m in city:
        for n in ytic:
            if m == n:
                continue
            else:
                citys.append([m, n])
    fly = FLIGHT()
    fly.demain(citys)
    print('\n程序运行完成!!!!')
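Both crawler variants build their origin/destination pairs with a nested loop over the city list and its reverse. The same set of pairs (every ordered combination of distinct cities) can be expressed more directly with itertools; a minimal sketch, which produces the same pairs, possibly in a different order:

```python
from itertools import permutations

city = ['上海', '广州', '深圳', '北京']

# Every ordered pair of distinct cities, e.g. ['上海', '广州'], ['广州', '上海'], ...
citys = [list(pair) for pair in permutations(city, 2)]
```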
@@ -1,143 +0,0 @@
import requests
import datetime
import re
import demjson
import time
import pandas as pd


def create_assist_date(datestart=None, dateend=None):
    # Build a helper list of dates between datestart and dateend
    if datestart is None:
        datestart = '2020-01-01'
    if dateend is None:
        dateend = (datetime.datetime.now() + datetime.timedelta(days=-1)).strftime('%Y-%m-%d')

    # Convert to datetime objects
    datestart = datetime.datetime.strptime(datestart, '%Y-%m-%d')
    dateend = datetime.datetime.strptime(dateend, '%Y-%m-%d')
    date_list = []
    date_list.append(datestart.strftime('%Y-%m-%d'))
    while datestart < dateend:
        # Advance one day
        datestart += datetime.timedelta(days=+1)
        # Store the date as a string
        date_list.append(datestart.strftime('%Y-%m-%d'))
    return date_list


def getdata(citys, dateseries):
    url = 'https://www.lsjpjg.com/getthis.php'

    headers = {
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Host': 'www.lsjpjg.com',
        'Origin': 'https://www.lsjpjg.com',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4647.116 Safari/537.36',
        'X-Requested-With': 'XMLHttpRequest'
    }

    for city in citys:
        df = pd.DataFrame()
        err = 0

        for date in dateseries:

            data = {'dep_dt': date, 'dep_ct': city[0], 'arr_ct': city[1]}
            res = requests.post(url, headers=headers, data=data)
            # Check whether the route has no flights on this date
            if res.text == '\ufeff[]':
                print(city, '无航班', date)
                err += 1
                # Abort the route once too many empty days accumulate
                if err > 30:
                    break
                continue
            else:
                err -= 1
                print(city, date)

            res.encoding = res.apparent_encoding
            NewResponse = re.sub(r"/", "", res.text)
            try:
                r = NewResponse.encode('utf-8')
                j = demjson.decode(r)
            except:
                continue
            temp = pd.DataFrame(j)
            try:
                temp.drop('icon', axis=1, inplace=True)
                temp['出发日期'] = date
            except:
                continue
            df = pd.concat([df, temp])
            time.sleep(0.5)

        filename = city[0] + '-' + city[1]
        # Post-process the raw data
        proc_data(filename, df, interval=8)


def proc_data(filename, df, interval=8):
    # Save the raw data locally
    df.to_csv(filename + '.csv', encoding='GB18030')
    df['全票价'] = 0
    df['日期差'] = None

    for i in df.index:
        try:
            if '经济' not in df['discount'][i]:
                df.drop(index=i, inplace=True)
            elif '折' in df['discount'][i]:
                # Drop rows whose gap between departure date and query date exceeds the threshold
                delta = datetime.datetime.strptime(df['出发日期'][i], '%Y-%m-%d') - datetime.datetime.strptime(df['qry_dt'][i], '%Y-%m-%d')
                if delta.days > interval:
                    df.drop(index=i, inplace=True)
                    continue
                else:
                    df.loc[i, '日期差'] = delta.days
                    # Derive the full fare from the discount rate
                    discount = float(re.findall(r'\d+\.?\d*', df['discount'][i])[0])
                    full_price = df['price'][i] / discount * 10
                    df.loc[i, '全票价'] = full_price

            elif '全价' in df['discount'][i] or '经典' in df['discount'][i]:
                # (the original `('全价' or '经典') in ...` only ever tested '全价')
                # Drop rows whose gap between departure date and query date exceeds the threshold
                delta = datetime.datetime.strptime(df['出发日期'][i], '%Y-%m-%d') - datetime.datetime.strptime(df['qry_dt'][i], '%Y-%m-%d')
                if delta.days > interval:
                    df.drop(index=i, inplace=True)
                    continue
                else:
                    df.loc[i, '日期差'] = delta.days
                    # Full fare as listed
                    full_price = df['price'][i]
                    df.loc[i, '全票价'] = full_price
        except:
            df.drop(index=i, inplace=True)

    avg_full_price = df[df['全票价'] != 0].groupby(['出发日期'])[['全票价']].mean()
    avg_price = df[df['全票价'] != df['price']].groupby(['出发日期'])[['price']].mean()
    result = pd.concat([avg_price, avg_full_price], axis=1)

    result['折扣'] = result['price'] / result['全票价']

    # Save the processed data locally
    # (the original wrote `result + '-' + filename`, which concatenates a DataFrame with a string)
    result.to_csv('result-' + filename + '.csv', encoding='GB18030')


if __name__ == '__main__':
    citys = []
    # Start and end dates for the query
    dateseries = create_assist_date(datestart=None, dateend=None)

    city = ['上海', '广州', '深圳', '北京']
    ytic = list(reversed(city))
    for m in city:
        for n in ytic:
            if m == n:
                continue
            else:
                citys.append([m, n])

    getdata(citys, dateseries)
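`create_assist_date` builds its date list with a manual loop; the same list can be produced in one call with pandas, which the script already imports. A minimal sketch with the same default range and output format as the original:

```python
import datetime
import pandas as pd

def create_assist_date(datestart=None, dateend=None):
    """Return every date from datestart to dateend (inclusive) as 'YYYY-MM-DD' strings."""
    datestart = datestart or '2020-01-01'
    dateend = dateend or (datetime.datetime.now() - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
    return [d.strftime('%Y-%m-%d') for d in pd.date_range(datestart, dateend, freq='D')]
```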
@@ -1,17 +0,0 @@
# must be unique in a given SonarQube instance
sonar.projectKey=clawer

# --- optional properties ---

# defaults to project key
sonar.projectName=clawer
# defaults to 'not provided'
#sonar.projectVersion=1.0

# Path is relative to the sonar-project.properties file. Defaults to .
#sonar.sources=src,WebContent

# Encoding of the source code. Default is default system encoding
sonar.sourceEncoding=UTF-8

#sonar.java.binaries=target/classes/javabean,target/classes/servlet
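These properties are read by the sonar-scanner CLI when a scan is launched from the directory containing the file. A minimal sketch of driving a scan from Python, assuming the sonar-scanner executable is on PATH and the server from report-task.txt is reachable; the `-D` override is optional and shown only as an illustration:

```python
import subprocess

# Run the scanner in the project root; everything else comes from sonar-project.properties.
subprocess.run(
    ["sonar-scanner", "-Dsonar.host.url=http://localhost:9000"],
    check=True,
)
```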
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
	<classpathentry including="**/*.java" kind="src" output="target/classes" path="src">
		<attributes>
			<attribute name="optional" value="true"/>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
	<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
	<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
		<attributes>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry kind="lib" path="WebContent/WEB-INF/lib/commons-fileupload-1.4.jar"/>
	<classpathentry kind="lib" path="WebContent/WEB-INF/lib/commons-io-2.6.jar"/>
	<classpathentry kind="lib" path="WebContent/WEB-INF/lib/json-20240303.jar"/>
	<classpathentry kind="lib" path="WebContent/WEB-INF/lib/mysql-connector-java-8.0.16.jar"/>
	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER">
		<attributes>
			<attribute name="module" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry kind="con" path="org.eclipse.jst.server.core.container/org.eclipse.jst.server.tomcat.runtimeTarget/Apache Tomcat v9.0"/>
	<classpathentry kind="output" path="target/classes"/>
</classpath>
@@ -1,8 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="jdk21" project-jdk-type="JavaSDK">
    <output url="file://$PROJECT_DIR$/classes" />
  </component>
</project>
@@ -1,37 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
	<name>air_ticket_book</name>
	<comment></comment>
	<projects>
	</projects>
	<buildSpec>
		<buildCommand>
			<name>org.eclipse.jdt.core.javabuilder</name>
			<arguments>
			</arguments>
		</buildCommand>
		<buildCommand>
			<name>org.eclipse.wst.common.project.facet.core.builder</name>
			<arguments>
			</arguments>
		</buildCommand>
		<buildCommand>
			<name>org.eclipse.wst.validation.validationbuilder</name>
			<arguments>
			</arguments>
		</buildCommand>
		<buildCommand>
			<name>org.eclipse.m2e.core.maven2Builder</name>
			<arguments>
			</arguments>
		</buildCommand>
	</buildSpec>
	<natures>
		<nature>org.eclipse.m2e.core.maven2Nature</nature>
		<nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
		<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
		<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
		<nature>org.eclipse.jdt.core.javanature</nature>
		<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
	</natures>
</projectDescription>
@@ -1 +0,0 @@
../atob/bin/atob.js
@@ -1 +0,0 @@
../autoprefixer/bin/autoprefixer
@@ -1 +0,0 @@
../browserslist/cli.js
@@ -1 +0,0 @@
../esprima/bin/esparse.js
@@ -1 +0,0 @@
../esprima/bin/esvalidate.js
@@ -1 +0,0 @@
../gonzales-pe/bin/gonzales.js
@@ -1 +0,0 @@
../js-yaml/bin/js-yaml.js
@@ -1 +0,0 @@
../jsesc/bin/jsesc
@@ -1 +0,0 @@
../json5/lib/cli.js
@@ -1 +0,0 @@
../mkdirp/bin/cmd.js
@@ -1 +0,0 @@
../@babel/parser/bin/babel-parser.js
@@ -1 +0,0 @@
../rimraf/bin.js
@@ -1 +0,0 @@
../semver/bin/semver
@@ -1 +0,0 @@
../specificity/bin/specificity
@@ -1 +0,0 @@
../stylelint/bin/stylelint.js
@@ -1 +0,0 @@
../which/bin/which
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-2018 Sebastian McKenzie <sebmck@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,19 +0,0 @@
# @babel/code-frame

> Generate errors that contain a code frame that point to source locations.

See our website [@babel/code-frame](https://babeljs.io/docs/en/next/babel-code-frame.html) for more information.

## Install

Using npm:

```sh
npm install --save-dev @babel/code-frame
```

or using yarn:

```sh
yarn add @babel/code-frame --dev
```
173 GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/code-frame/lib/index.js generated vendored
@ -1,173 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.codeFrameColumns = codeFrameColumns;
|
||||
exports.default = _default;
|
||||
|
||||
function _highlight() {
|
||||
const data = _interopRequireWildcard(require("@babel/highlight"));
|
||||
|
||||
_highlight = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
|
||||
|
||||
let deprecationWarningShown = false;
|
||||
|
||||
function getDefs(chalk) {
|
||||
return {
|
||||
gutter: chalk.grey,
|
||||
marker: chalk.red.bold,
|
||||
message: chalk.red.bold
|
||||
};
|
||||
}
|
||||
|
||||
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
|
||||
|
||||
function getMarkerLines(loc, source, opts) {
|
||||
const startLoc = Object.assign({
|
||||
column: 0,
|
||||
line: -1
|
||||
}, loc.start);
|
||||
const endLoc = Object.assign({}, startLoc, loc.end);
|
||||
const {
|
||||
linesAbove = 2,
|
||||
linesBelow = 3
|
||||
} = opts || {};
|
||||
const startLine = startLoc.line;
|
||||
const startColumn = startLoc.column;
|
||||
const endLine = endLoc.line;
|
||||
const endColumn = endLoc.column;
|
||||
let start = Math.max(startLine - (linesAbove + 1), 0);
|
||||
let end = Math.min(source.length, endLine + linesBelow);
|
||||
|
||||
if (startLine === -1) {
|
||||
start = 0;
|
||||
}
|
||||
|
||||
if (endLine === -1) {
|
||||
end = source.length;
|
||||
}
|
||||
|
||||
const lineDiff = endLine - startLine;
|
||||
const markerLines = {};
|
||||
|
||||
if (lineDiff) {
|
||||
for (let i = 0; i <= lineDiff; i++) {
|
||||
const lineNumber = i + startLine;
|
||||
|
||||
if (!startColumn) {
|
||||
markerLines[lineNumber] = true;
|
||||
} else if (i === 0) {
|
||||
const sourceLength = source[lineNumber - 1].length;
|
||||
markerLines[lineNumber] = [startColumn, sourceLength - startColumn];
|
||||
} else if (i === lineDiff) {
|
||||
markerLines[lineNumber] = [0, endColumn];
|
||||
} else {
|
||||
const sourceLength = source[lineNumber - i].length;
|
||||
markerLines[lineNumber] = [0, sourceLength];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (startColumn === endColumn) {
|
||||
if (startColumn) {
|
||||
markerLines[startLine] = [startColumn, 0];
|
||||
} else {
|
||||
markerLines[startLine] = true;
|
||||
}
|
||||
} else {
|
||||
markerLines[startLine] = [startColumn, endColumn - startColumn];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
start,
|
||||
end,
|
||||
markerLines
|
||||
};
|
||||
}
|
||||
|
||||
function codeFrameColumns(rawLines, loc, opts = {}) {
|
||||
const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight().shouldHighlight)(opts);
|
||||
const chalk = (0, _highlight().getChalk)(opts);
|
||||
const defs = getDefs(chalk);
|
||||
|
||||
const maybeHighlight = (chalkFn, string) => {
|
||||
return highlighted ? chalkFn(string) : string;
|
||||
};
|
||||
|
||||
if (highlighted) rawLines = (0, _highlight().default)(rawLines, opts);
|
||||
const lines = rawLines.split(NEWLINE);
|
||||
const {
|
||||
start,
|
||||
end,
|
||||
markerLines
|
||||
} = getMarkerLines(loc, lines, opts);
|
||||
const hasColumns = loc.start && typeof loc.start.column === "number";
|
||||
const numberMaxWidth = String(end).length;
|
||||
let frame = lines.slice(start, end).map((line, index) => {
|
||||
const number = start + 1 + index;
|
||||
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
|
||||
const gutter = ` ${paddedNumber} | `;
|
||||
const hasMarker = markerLines[number];
|
||||
const lastMarkerLine = !markerLines[number + 1];
|
||||
|
||||
if (hasMarker) {
|
||||
let markerLine = "";
|
||||
|
||||
if (Array.isArray(hasMarker)) {
|
||||
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
|
||||
const numberOfMarkers = hasMarker[1] || 1;
|
||||
markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
|
||||
|
||||
if (lastMarkerLine && opts.message) {
|
||||
markerLine += " " + maybeHighlight(defs.message, opts.message);
|
||||
}
|
||||
}
|
||||
|
||||
return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line, markerLine].join("");
|
||||
} else {
|
||||
return ` ${maybeHighlight(defs.gutter, gutter)}${line}`;
|
||||
}
|
||||
}).join("\n");
|
||||
|
||||
if (opts.message && !hasColumns) {
|
||||
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
|
||||
}
|
||||
|
||||
if (highlighted) {
|
||||
return chalk.reset(frame);
|
||||
} else {
|
||||
return frame;
|
||||
}
|
||||
}
|
||||
|
||||
function _default(rawLines, lineNumber, colNumber, opts = {}) {
|
||||
if (!deprecationWarningShown) {
|
||||
deprecationWarningShown = true;
|
||||
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
|
||||
|
||||
if (process.emitWarning) {
|
||||
process.emitWarning(message, "DeprecationWarning");
|
||||
} else {
|
||||
const deprecationError = new Error(message);
|
||||
deprecationError.name = "DeprecationWarning";
|
||||
console.warn(new Error(message));
|
||||
}
|
||||
}
|
||||
|
||||
colNumber = Math.max(colNumber, 0);
|
||||
const location = {
|
||||
start: {
|
||||
column: colNumber,
|
||||
line: lineNumber
|
||||
}
|
||||
};
|
||||
return codeFrameColumns(rawLines, location, opts);
|
||||
}
|
54 GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/code-frame/package.json generated vendored
@ -1,54 +0,0 @@
|
||||
{
|
||||
"_args": [
|
||||
[
|
||||
"@babel/code-frame@7.0.0",
|
||||
"/home/travis/build/SonarSource/sonar-css/sonar-css-plugin/css-bundle"
|
||||
]
|
||||
],
|
||||
"_from": "@babel/code-frame@7.0.0",
|
||||
"_id": "@babel/code-frame@7.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==",
|
||||
"_location": "/@babel/code-frame",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "version",
|
||||
"registry": true,
|
||||
"raw": "@babel/code-frame@7.0.0",
|
||||
"name": "@babel/code-frame",
|
||||
"escapedName": "@babel%2fcode-frame",
|
||||
"scope": "@babel",
|
||||
"rawSpec": "7.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "7.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/@babel/core",
|
||||
"/@babel/template",
|
||||
"/@babel/traverse"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0.tgz",
|
||||
"_spec": "7.0.0",
|
||||
"_where": "/home/travis/build/SonarSource/sonar-css/sonar-css-plugin/css-bundle",
|
||||
"author": {
|
||||
"name": "Sebastian McKenzie",
|
||||
"email": "sebmck@gmail.com"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/highlight": "^7.0.0"
|
||||
},
|
||||
"description": "Generate errors that contain a code frame that point to source locations.",
|
||||
"devDependencies": {
|
||||
"chalk": "^2.0.0",
|
||||
"strip-ansi": "^4.0.0"
|
||||
},
|
||||
"homepage": "https://babeljs.io/",
|
||||
"license": "MIT",
|
||||
"main": "lib/index.js",
|
||||
"name": "@babel/code-frame",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/babel/babel/tree/master/packages/babel-code-frame"
|
||||
},
|
||||
"version": "7.0.0"
|
||||
}
|
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,19 +0,0 @@
# @babel/core

> Babel compiler core.

See our website [@babel/core](https://babeljs.io/docs/en/next/babel-core.html) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.

## Install

Using npm:

```sh
npm install --save-dev @babel/core
```

or using yarn:

```sh
yarn add @babel/core --dev
```
199 GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/caching.js generated vendored
@ -1,199 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.makeStrongCache = makeStrongCache;
|
||||
exports.makeWeakCache = makeWeakCache;
|
||||
exports.assertSimpleType = assertSimpleType;
|
||||
|
||||
function makeStrongCache(handler) {
|
||||
return makeCachedFunction(new Map(), handler);
|
||||
}
|
||||
|
||||
function makeWeakCache(handler) {
|
||||
return makeCachedFunction(new WeakMap(), handler);
|
||||
}
|
||||
|
||||
function makeCachedFunction(callCache, handler) {
|
||||
return function cachedFunction(arg, data) {
|
||||
let cachedValue = callCache.get(arg);
|
||||
|
||||
if (cachedValue) {
|
||||
for (const _ref of cachedValue) {
|
||||
const {
|
||||
value,
|
||||
valid
|
||||
} = _ref;
|
||||
if (valid(data)) return value;
|
||||
}
|
||||
}
|
||||
|
||||
const cache = new CacheConfigurator(data);
|
||||
const value = handler(arg, cache);
|
||||
if (!cache.configured()) cache.forever();
|
||||
cache.deactivate();
|
||||
|
||||
switch (cache.mode()) {
|
||||
case "forever":
|
||||
cachedValue = [{
|
||||
value,
|
||||
valid: () => true
|
||||
}];
|
||||
callCache.set(arg, cachedValue);
|
||||
break;
|
||||
|
||||
case "invalidate":
|
||||
cachedValue = [{
|
||||
value,
|
||||
valid: cache.validator()
|
||||
}];
|
||||
callCache.set(arg, cachedValue);
|
||||
break;
|
||||
|
||||
case "valid":
|
||||
if (cachedValue) {
|
||||
cachedValue.push({
|
||||
value,
|
||||
valid: cache.validator()
|
||||
});
|
||||
} else {
|
||||
cachedValue = [{
|
||||
value,
|
||||
valid: cache.validator()
|
||||
}];
|
||||
callCache.set(arg, cachedValue);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
}
|
||||
|
||||
class CacheConfigurator {
|
||||
constructor(data) {
|
||||
this._active = true;
|
||||
this._never = false;
|
||||
this._forever = false;
|
||||
this._invalidate = false;
|
||||
this._configured = false;
|
||||
this._pairs = [];
|
||||
this._data = data;
|
||||
}
|
||||
|
||||
simple() {
|
||||
return makeSimpleConfigurator(this);
|
||||
}
|
||||
|
||||
mode() {
|
||||
if (this._never) return "never";
|
||||
if (this._forever) return "forever";
|
||||
if (this._invalidate) return "invalidate";
|
||||
return "valid";
|
||||
}
|
||||
|
||||
forever() {
|
||||
if (!this._active) {
|
||||
throw new Error("Cannot change caching after evaluation has completed.");
|
||||
}
|
||||
|
||||
if (this._never) {
|
||||
throw new Error("Caching has already been configured with .never()");
|
||||
}
|
||||
|
||||
this._forever = true;
|
||||
this._configured = true;
|
||||
}
|
||||
|
||||
never() {
|
||||
if (!this._active) {
|
||||
throw new Error("Cannot change caching after evaluation has completed.");
|
||||
}
|
||||
|
||||
if (this._forever) {
|
||||
throw new Error("Caching has already been configured with .forever()");
|
||||
}
|
||||
|
||||
this._never = true;
|
||||
this._configured = true;
|
||||
}
|
||||
|
||||
using(handler) {
|
||||
if (!this._active) {
|
||||
throw new Error("Cannot change caching after evaluation has completed.");
|
||||
}
|
||||
|
||||
if (this._never || this._forever) {
|
||||
throw new Error("Caching has already been configured with .never or .forever()");
|
||||
}
|
||||
|
||||
this._configured = true;
|
||||
const key = handler(this._data);
|
||||
|
||||
this._pairs.push([key, handler]);
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
invalidate(handler) {
|
||||
if (!this._active) {
|
||||
throw new Error("Cannot change caching after evaluation has completed.");
|
||||
}
|
||||
|
||||
if (this._never || this._forever) {
|
||||
throw new Error("Caching has already been configured with .never or .forever()");
|
||||
}
|
||||
|
||||
this._invalidate = true;
|
||||
this._configured = true;
|
||||
const key = handler(this._data);
|
||||
|
||||
this._pairs.push([key, handler]);
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
validator() {
|
||||
const pairs = this._pairs;
|
||||
return data => pairs.every(([key, fn]) => key === fn(data));
|
||||
}
|
||||
|
||||
deactivate() {
|
||||
this._active = false;
|
||||
}
|
||||
|
||||
configured() {
|
||||
return this._configured;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function makeSimpleConfigurator(cache) {
|
||||
function cacheFn(val) {
|
||||
if (typeof val === "boolean") {
|
||||
if (val) cache.forever();else cache.never();
|
||||
return;
|
||||
}
|
||||
|
||||
return cache.using(() => assertSimpleType(val()));
|
||||
}
|
||||
|
||||
cacheFn.forever = () => cache.forever();
|
||||
|
||||
cacheFn.never = () => cache.never();
|
||||
|
||||
cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
|
||||
|
||||
cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
|
||||
|
||||
return cacheFn;
|
||||
}
|
||||
|
||||
function assertSimpleType(value) {
|
||||
if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
|
||||
throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
439 GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/config-chain.js generated vendored
@ -1,439 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.buildPresetChain = buildPresetChain;
|
||||
exports.buildRootChain = buildRootChain;
|
||||
exports.buildPresetChainWalker = void 0;
|
||||
|
||||
function _path() {
|
||||
const data = _interopRequireDefault(require("path"));
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _debug() {
|
||||
const data = _interopRequireDefault(require("debug"));
|
||||
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _options = require("./validation/options");
|
||||
|
||||
var _patternToRegex = _interopRequireDefault(require("./pattern-to-regex"));
|
||||
|
||||
var _files = require("./files");
|
||||
|
||||
var _caching = require("./caching");
|
||||
|
||||
var _configDescriptors = require("./config-descriptors");
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
const debug = (0, _debug().default)("babel:config:config-chain");
|
||||
|
||||
function buildPresetChain(arg, context) {
|
||||
const chain = buildPresetChainWalker(arg, context);
|
||||
if (!chain) return null;
|
||||
return {
|
||||
plugins: dedupDescriptors(chain.plugins),
|
||||
presets: dedupDescriptors(chain.presets),
|
||||
options: chain.options.map(o => normalizeOptions(o))
|
||||
};
|
||||
}
|
||||
|
||||
const buildPresetChainWalker = makeChainWalker({
|
||||
init: arg => arg,
|
||||
root: preset => loadPresetDescriptors(preset),
|
||||
env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
|
||||
overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
|
||||
overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName)
|
||||
});
|
||||
exports.buildPresetChainWalker = buildPresetChainWalker;
|
||||
const loadPresetDescriptors = (0, _caching.makeWeakCache)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
|
||||
const loadPresetEnvDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
|
||||
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
|
||||
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCache)(preset => (0, _caching.makeStrongCache)(index => (0, _caching.makeStrongCache)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));
|
||||
|
||||
function buildRootChain(opts, context) {
|
||||
const programmaticChain = loadProgrammaticChain({
|
||||
options: opts,
|
||||
dirname: context.cwd
|
||||
}, context);
|
||||
if (!programmaticChain) return null;
|
||||
let configFile;
|
||||
|
||||
if (typeof opts.configFile === "string") {
|
||||
configFile = (0, _files.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
|
||||
} else if (opts.configFile !== false) {
|
||||
configFile = (0, _files.findRootConfig)(context.root, context.envName, context.caller);
|
||||
}
|
||||
|
||||
let {
|
||||
babelrc,
|
||||
babelrcRoots
|
||||
} = opts;
|
||||
let babelrcRootsDirectory = context.cwd;
|
||||
const configFileChain = emptyChain();
|
||||
|
||||
if (configFile) {
|
||||
const validatedFile = validateConfigFile(configFile);
|
||||
const result = loadFileChain(validatedFile, context);
|
||||
if (!result) return null;
|
||||
|
||||
if (babelrc === undefined) {
|
||||
babelrc = validatedFile.options.babelrc;
|
||||
}
|
||||
|
||||
if (babelrcRoots === undefined) {
|
||||
babelrcRootsDirectory = validatedFile.dirname;
|
||||
babelrcRoots = validatedFile.options.babelrcRoots;
|
||||
}
|
||||
|
||||
mergeChain(configFileChain, result);
|
||||
}
|
||||
|
||||
const pkgData = typeof context.filename === "string" ? (0, _files.findPackageData)(context.filename) : null;
|
||||
let ignoreFile, babelrcFile;
|
||||
const fileChain = emptyChain();
|
||||
|
||||
if ((babelrc === true || babelrc === undefined) && pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
|
||||
({
|
||||
ignore: ignoreFile,
|
||||
config: babelrcFile
|
||||
} = (0, _files.findRelativeConfig)(pkgData, context.envName, context.caller));
|
||||
|
||||
if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (babelrcFile) {
|
||||
const result = loadFileChain(validateBabelrcFile(babelrcFile), context);
|
||||
if (!result) return null;
|
||||
mergeChain(fileChain, result);
|
||||
}
|
||||
}
|
||||
|
||||
const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
|
||||
return {
|
||||
plugins: dedupDescriptors(chain.plugins),
|
||||
presets: dedupDescriptors(chain.presets),
|
||||
options: chain.options.map(o => normalizeOptions(o)),
|
||||
ignore: ignoreFile || undefined,
|
||||
babelrc: babelrcFile || undefined,
|
||||
config: configFile || undefined
|
||||
};
|
||||
}
|
||||
|
||||
function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
|
||||
if (typeof babelrcRoots === "boolean") return babelrcRoots;
|
||||
const absoluteRoot = context.root;
|
||||
|
||||
if (babelrcRoots === undefined) {
|
||||
return pkgData.directories.indexOf(absoluteRoot) !== -1;
|
||||
}
|
||||
|
||||
let babelrcPatterns = babelrcRoots;
|
||||
if (!Array.isArray(babelrcPatterns)) babelrcPatterns = [babelrcPatterns];
|
||||
babelrcPatterns = babelrcPatterns.map(pat => {
|
||||
return typeof pat === "string" ? _path().default.resolve(babelrcRootsDirectory, pat) : pat;
|
||||
});
|
||||
|
||||
if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
|
||||
return pkgData.directories.indexOf(absoluteRoot) !== -1;
|
||||
}
|
||||
|
||||
return babelrcPatterns.some(pat => {
|
||||
if (typeof pat === "string") {
|
||||
pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
|
||||
}
|
||||
|
||||
return pkgData.directories.some(directory => {
|
||||
return matchPattern(pat, babelrcRootsDirectory, directory, context);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const validateConfigFile = (0, _caching.makeWeakCache)(file => ({
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: (0, _options.validate)("configfile", file.options)
|
||||
}));
|
||||
const validateBabelrcFile = (0, _caching.makeWeakCache)(file => ({
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: (0, _options.validate)("babelrcfile", file.options)
|
||||
}));
|
||||
const validateExtendFile = (0, _caching.makeWeakCache)(file => ({
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: (0, _options.validate)("extendsfile", file.options)
|
||||
}));
|
||||
const loadProgrammaticChain = makeChainWalker({
|
||||
root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
|
||||
env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
|
||||
overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
|
||||
overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName)
|
||||
});
|
||||
const loadFileChain = makeChainWalker({
|
||||
root: file => loadFileDescriptors(file),
|
||||
env: (file, envName) => loadFileEnvDescriptors(file)(envName),
|
||||
overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
|
||||
overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName)
|
||||
});
|
||||
const loadFileDescriptors = (0, _caching.makeWeakCache)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
|
||||
const loadFileEnvDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
|
||||
const loadFileOverridesDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
|
||||
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCache)(file => (0, _caching.makeStrongCache)(index => (0, _caching.makeStrongCache)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));
|
||||
|
||||
function buildRootDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors) {
|
||||
return descriptors(dirname, options, alias);
|
||||
}
|
||||
|
||||
function buildEnvDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors, envName) {
|
||||
const opts = options.env && options.env[envName];
|
||||
return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
|
||||
}
|
||||
|
||||
function buildOverrideDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors, index) {
|
||||
const opts = options.overrides && options.overrides[index];
|
||||
if (!opts) throw new Error("Assertion failure - missing override");
|
||||
return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
|
||||
}
|
||||
|
||||
function buildOverrideEnvDescriptors({
|
||||
dirname,
|
||||
options
|
||||
}, alias, descriptors, index, envName) {
|
||||
const override = options.overrides && options.overrides[index];
|
||||
if (!override) throw new Error("Assertion failure - missing override");
|
||||
const opts = override.env && override.env[envName];
|
||||
return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
|
||||
}
|
||||
|
||||
function makeChainWalker({
|
||||
root,
|
||||
env,
|
||||
overrides,
|
||||
overridesEnv
|
||||
}) {
|
||||
return (input, context, files = new Set()) => {
|
||||
const {
|
||||
dirname
|
||||
} = input;
|
||||
const flattenedConfigs = [];
|
||||
const rootOpts = root(input);
|
||||
|
||||
if (configIsApplicable(rootOpts, dirname, context)) {
|
||||
flattenedConfigs.push(rootOpts);
|
||||
const envOpts = env(input, context.envName);
|
||||
|
||||
if (envOpts && configIsApplicable(envOpts, dirname, context)) {
|
||||
flattenedConfigs.push(envOpts);
|
||||
}
|
||||
|
||||
(rootOpts.options.overrides || []).forEach((_, index) => {
|
||||
const overrideOps = overrides(input, index);
|
||||
|
||||
if (configIsApplicable(overrideOps, dirname, context)) {
|
||||
flattenedConfigs.push(overrideOps);
|
||||
const overrideEnvOpts = overridesEnv(input, index, context.envName);
|
||||
|
||||
if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context)) {
|
||||
flattenedConfigs.push(overrideEnvOpts);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (flattenedConfigs.some(({
|
||||
options: {
|
||||
ignore,
|
||||
only
|
||||
}
|
||||
}) => shouldIgnore(context, ignore, only, dirname))) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const chain = emptyChain();
|
||||
|
||||
for (const op of flattenedConfigs) {
|
||||
if (!mergeExtendsChain(chain, op.options, dirname, context, files)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
mergeChainOpts(chain, op);
|
||||
}
|
||||
|
||||
return chain;
|
||||
};
|
||||
}
|
||||
|
||||
function mergeExtendsChain(chain, opts, dirname, context, files) {
|
||||
if (opts.extends === undefined) return true;
|
||||
const file = (0, _files.loadConfig)(opts.extends, dirname, context.envName, context.caller);
|
||||
|
||||
if (files.has(file)) {
|
||||
throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
|
||||
}
|
||||
|
||||
files.add(file);
|
||||
const fileChain = loadFileChain(validateExtendFile(file), context, files);
|
||||
files.delete(file);
|
||||
if (!fileChain) return false;
|
||||
mergeChain(chain, fileChain);
|
||||
return true;
|
||||
}
|
||||
|
||||
function mergeChain(target, source) {
|
||||
target.options.push(...source.options);
|
||||
target.plugins.push(...source.plugins);
|
||||
target.presets.push(...source.presets);
|
||||
return target;
|
||||
}
|
||||
|
||||
function mergeChainOpts(target, {
|
||||
options,
|
||||
plugins,
|
||||
presets
|
||||
}) {
|
||||
target.options.push(options);
|
||||
target.plugins.push(...plugins());
|
||||
target.presets.push(...presets());
|
||||
return target;
|
||||
}
|
||||
|
||||
function emptyChain() {
|
||||
return {
|
||||
options: [],
|
||||
presets: [],
|
||||
plugins: []
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeOptions(opts) {
|
||||
const options = Object.assign({}, opts);
|
||||
delete options.extends;
|
||||
delete options.env;
|
||||
delete options.overrides;
|
||||
delete options.plugins;
|
||||
delete options.presets;
|
||||
delete options.passPerPreset;
|
||||
delete options.ignore;
|
||||
delete options.only;
|
||||
delete options.test;
|
||||
delete options.include;
|
||||
delete options.exclude;
|
||||
|
||||
if (options.hasOwnProperty("sourceMap")) {
|
||||
options.sourceMaps = options.sourceMap;
|
||||
delete options.sourceMap;
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
function dedupDescriptors(items) {
|
||||
const map = new Map();
|
||||
const descriptors = [];
|
||||
|
||||
for (const item of items) {
|
||||
if (typeof item.value === "function") {
|
||||
const fnKey = item.value;
|
||||
let nameMap = map.get(fnKey);
|
||||
|
||||
if (!nameMap) {
|
||||
nameMap = new Map();
|
||||
map.set(fnKey, nameMap);
|
||||
}
|
||||
|
||||
let desc = nameMap.get(item.name);
|
||||
|
||||
if (!desc) {
|
||||
desc = {
|
||||
value: item
|
||||
};
|
||||
descriptors.push(desc);
|
||||
if (!item.ownPass) nameMap.set(item.name, desc);
|
||||
} else {
|
||||
desc.value = item;
|
||||
}
|
||||
} else {
|
||||
descriptors.push({
|
||||
value: item
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return descriptors.reduce((acc, desc) => {
|
||||
acc.push(desc.value);
|
||||
return acc;
|
||||
}, []);
|
||||
}
|
||||
|
||||
function configIsApplicable({
|
||||
options
|
||||
}, dirname, context) {
|
||||
return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname));
|
||||
}
|
||||
|
||||
function configFieldIsApplicable(context, test, dirname) {
|
||||
const patterns = Array.isArray(test) ? test : [test];
|
||||
return matchesPatterns(context, patterns, dirname);
|
||||
}
|
||||
|
||||
function shouldIgnore(context, ignore, only, dirname) {
|
||||
if (ignore && matchesPatterns(context, ignore, dirname)) {
|
||||
debug("Ignored %o because it matched one of %O from %o", context.filename, ignore, dirname);
|
||||
return true;
|
||||
}
|
||||
|
||||
if (only && !matchesPatterns(context, only, dirname)) {
|
||||
debug("Ignored %o because it failed to match one of %O from %o", context.filename, only, dirname);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function matchesPatterns(context, patterns, dirname) {
|
||||
return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context));
|
||||
}
|
||||
|
||||
function matchPattern(pattern, dirname, pathToTest, context) {
|
||||
if (typeof pattern === "function") {
|
||||
return !!pattern(pathToTest, {
|
||||
dirname,
|
||||
envName: context.envName,
|
||||
caller: context.caller
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof pathToTest !== "string") {
|
||||
throw new Error(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`);
|
||||
}
|
||||
|
||||
if (typeof pattern === "string") {
|
||||
pattern = (0, _patternToRegex.default)(pattern, dirname);
|
||||
}
|
||||
|
||||
return pattern.test(pathToTest);
|
||||
}
|
@ -1,210 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createCachedDescriptors = createCachedDescriptors;
|
||||
exports.createUncachedDescriptors = createUncachedDescriptors;
|
||||
exports.createDescriptor = createDescriptor;
|
||||
|
||||
var _files = require("./files");
|
||||
|
||||
var _item = require("./item");
|
||||
|
||||
var _caching = require("./caching");
|
||||
|
||||
function isEqualDescriptor(a, b) {
|
||||
return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && (a.file && a.file.request) === (b.file && b.file.request) && (a.file && a.file.resolved) === (b.file && b.file.resolved);
|
||||
}
|
||||
|
||||
function createCachedDescriptors(dirname, options, alias) {
|
||||
const {
|
||||
plugins,
|
||||
presets,
|
||||
passPerPreset
|
||||
} = options;
|
||||
return {
|
||||
options,
|
||||
plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => [],
|
||||
presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => []
|
||||
};
|
||||
}
|
||||
|
||||
function createUncachedDescriptors(dirname, options, alias) {
|
||||
let plugins;
|
||||
let presets;
|
||||
return {
|
||||
options,
|
||||
plugins: () => {
|
||||
if (!plugins) {
|
||||
plugins = createPluginDescriptors(options.plugins || [], dirname, alias);
|
||||
}
|
||||
|
||||
return plugins;
|
||||
},
|
||||
presets: () => {
|
||||
if (!presets) {
|
||||
presets = createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset);
|
||||
}
|
||||
|
||||
return presets;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const PRESET_DESCRIPTOR_CACHE = new WeakMap();
|
||||
const createCachedPresetDescriptors = (0, _caching.makeWeakCache)((items, cache) => {
|
||||
const dirname = cache.using(dir => dir);
|
||||
return (0, _caching.makeStrongCache)(alias => (0, _caching.makeStrongCache)(passPerPreset => createPresetDescriptors(items, dirname, alias, passPerPreset).map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc))));
|
||||
});
|
||||
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
|
||||
const createCachedPluginDescriptors = (0, _caching.makeWeakCache)((items, cache) => {
|
||||
const dirname = cache.using(dir => dir);
|
||||
return (0, _caching.makeStrongCache)(alias => createPluginDescriptors(items, dirname, alias).map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc)));
|
||||
});
|
||||
const DEFAULT_OPTIONS = {};
|
||||
|
||||
function loadCachedDescriptor(cache, desc) {
|
||||
const {
|
||||
value,
|
||||
options = DEFAULT_OPTIONS
|
||||
} = desc;
|
||||
if (options === false) return desc;
|
||||
let cacheByOptions = cache.get(value);
|
||||
|
||||
if (!cacheByOptions) {
|
||||
cacheByOptions = new WeakMap();
|
||||
cache.set(value, cacheByOptions);
|
||||
}
|
||||
|
||||
let possibilities = cacheByOptions.get(options);
|
||||
|
||||
if (!possibilities) {
|
||||
possibilities = [];
|
||||
cacheByOptions.set(options, possibilities);
|
||||
}
|
||||
|
||||
if (possibilities.indexOf(desc) === -1) {
|
||||
const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));
|
||||
|
||||
if (matches.length > 0) {
|
||||
return matches[0];
|
||||
}
|
||||
|
||||
possibilities.push(desc);
|
||||
}
|
||||
|
||||
return desc;
|
||||
}
|
||||
|
||||
function createPresetDescriptors(items, dirname, alias, passPerPreset) {
|
||||
return createDescriptors("preset", items, dirname, alias, passPerPreset);
|
||||
}
|
||||
|
||||
function createPluginDescriptors(items, dirname, alias) {
|
||||
return createDescriptors("plugin", items, dirname, alias);
|
||||
}
|
||||
|
||||
function createDescriptors(type, items, dirname, alias, ownPass) {
|
||||
const descriptors = items.map((item, index) => createDescriptor(item, dirname, {
|
||||
type,
|
||||
alias: `${alias}$${index}`,
|
||||
ownPass: !!ownPass
|
||||
}));
|
||||
assertNoDuplicates(descriptors);
|
||||
return descriptors;
|
||||
}
|
||||
|
||||
function createDescriptor(pair, dirname, {
|
||||
type,
|
||||
alias,
|
||||
ownPass
|
||||
}) {
|
||||
const desc = (0, _item.getItemDescriptor)(pair);
|
||||
|
||||
if (desc) {
|
||||
return desc;
|
||||
}
|
||||
|
||||
let name;
|
||||
let options;
|
||||
let value = pair;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
if (value.length === 3) {
|
||||
[value, options, name] = value;
|
||||
} else {
|
||||
[value, options] = value;
|
||||
}
|
||||
}
|
||||
|
||||
let file = undefined;
|
||||
let filepath = null;
|
||||
|
||||
if (typeof value === "string") {
|
||||
if (typeof type !== "string") {
|
||||
throw new Error("To resolve a string-based item, the type of item must be given");
|
||||
}
|
||||
|
||||
const resolver = type === "plugin" ? _files.loadPlugin : _files.loadPreset;
|
||||
const request = value;
|
||||
({
|
||||
filepath,
|
||||
value
|
||||
} = resolver(value, dirname));
|
||||
file = {
|
||||
request,
|
||||
resolved: filepath
|
||||
};
|
||||
}
|
||||
|
||||
if (!value) {
|
||||
throw new Error(`Unexpected falsy value: ${String(value)}`);
|
||||
}
|
||||
|
||||
if (typeof value === "object" && value.__esModule) {
|
||||
if (value.default) {
|
||||
value = value.default;
|
||||
} else {
|
||||
throw new Error("Must export a default export when using ES6 modules.");
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof value !== "object" && typeof value !== "function") {
|
||||
throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
|
||||
}
|
||||
|
||||
if (filepath !== null && typeof value === "object" && value) {
|
||||
throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
|
||||
}
|
||||
|
||||
return {
|
||||
name,
|
||||
alias: filepath || alias,
|
||||
value,
|
||||
options,
|
||||
dirname,
|
||||
ownPass,
|
||||
file
|
||||
};
|
||||
}
|
||||
|
||||
function assertNoDuplicates(items) {
|
||||
const map = new Map();
|
||||
|
||||
for (const item of items) {
|
||||
if (typeof item.value !== "function") continue;
|
||||
let nameMap = map.get(item.value);
|
||||
|
||||
if (!nameMap) {
|
||||
nameMap = new Set();
|
||||
map.set(item.value, nameMap);
|
||||
}
|
||||
|
||||
if (nameMap.has(item.name)) {
|
||||
throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, ` plugins: [`, ` ['some-plugin', {}],`, ` ['some-plugin', {}, 'some unique name'],`, ` ]`].join("\n"));
|
||||
}
|
||||
|
||||
nameMap.add(item.name);
|
||||
}
|
||||
}
|
@ -1,323 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.findConfigUpwards = findConfigUpwards;
|
||||
exports.findRelativeConfig = findRelativeConfig;
|
||||
exports.findRootConfig = findRootConfig;
|
||||
exports.loadConfig = loadConfig;
|
||||
|
||||
function _debug() {
|
||||
const data = _interopRequireDefault(require("debug"));
|
||||
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _path() {
|
||||
const data = _interopRequireDefault(require("path"));
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _fs() {
|
||||
const data = _interopRequireDefault(require("fs"));
|
||||
|
||||
_fs = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _json() {
|
||||
const data = _interopRequireDefault(require("json5"));
|
||||
|
||||
_json = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _resolve() {
|
||||
const data = _interopRequireDefault(require("resolve"));
|
||||
|
||||
_resolve = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _caching = require("../caching");
|
||||
|
||||
var _configApi = _interopRequireDefault(require("../helpers/config-api"));
|
||||
|
||||
var _utils = require("./utils");
|
||||
|
||||
var _patternToRegex = _interopRequireDefault(require("../pattern-to-regex"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
const debug = (0, _debug().default)("babel:config:loading:files:configuration");
|
||||
const BABEL_CONFIG_JS_FILENAME = "babel.config.js";
|
||||
const BABELRC_FILENAME = ".babelrc";
|
||||
const BABELRC_JS_FILENAME = ".babelrc.js";
|
||||
const BABELIGNORE_FILENAME = ".babelignore";
|
||||
|
||||
function findConfigUpwards(rootDir) {
|
||||
let dirname = rootDir;
|
||||
|
||||
while (true) {
|
||||
if (_fs().default.existsSync(_path().default.join(dirname, BABEL_CONFIG_JS_FILENAME))) {
|
||||
return dirname;
|
||||
}
|
||||
|
||||
const nextDir = _path().default.dirname(dirname);
|
||||
|
||||
if (dirname === nextDir) break;
|
||||
dirname = nextDir;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function findRelativeConfig(packageData, envName, caller) {
|
||||
let config = null;
|
||||
let ignore = null;
|
||||
|
||||
const dirname = _path().default.dirname(packageData.filepath);
|
||||
|
||||
for (const loc of packageData.directories) {
|
||||
if (!config) {
|
||||
config = [BABELRC_FILENAME, BABELRC_JS_FILENAME].reduce((previousConfig, name) => {
|
||||
const filepath = _path().default.join(loc, name);
|
||||
|
||||
const config = readConfig(filepath, envName, caller);
|
||||
|
||||
if (config && previousConfig) {
|
||||
throw new Error(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().default.basename(previousConfig.filepath)}\n` + ` - ${name}\n` + `from ${loc}`);
|
||||
}
|
||||
|
||||
return config || previousConfig;
|
||||
}, null);
|
||||
const pkgConfig = packageData.pkg && packageData.pkg.dirname === loc ? packageToBabelConfig(packageData.pkg) : null;
|
||||
|
||||
if (pkgConfig) {
|
||||
if (config) {
|
||||
throw new Error(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().default.basename(pkgConfig.filepath)}#babel\n` + ` - ${_path().default.basename(config.filepath)}\n` + `from ${loc}`);
|
||||
}
|
||||
|
||||
config = pkgConfig;
|
||||
}
|
||||
|
||||
if (config) {
|
||||
debug("Found configuration %o from %o.", config.filepath, dirname);
|
||||
}
|
||||
}
|
||||
|
||||
if (!ignore) {
|
||||
const ignoreLoc = _path().default.join(loc, BABELIGNORE_FILENAME);
|
||||
|
||||
ignore = readIgnoreConfig(ignoreLoc);
|
||||
|
||||
if (ignore) {
|
||||
debug("Found ignore %o from %o.", ignore.filepath, dirname);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
config,
|
||||
ignore
|
||||
};
|
||||
}
|
||||
|
||||
function findRootConfig(dirname, envName, caller) {
|
||||
const filepath = _path().default.resolve(dirname, BABEL_CONFIG_JS_FILENAME);
|
||||
|
||||
const conf = readConfig(filepath, envName, caller);
|
||||
|
||||
if (conf) {
|
||||
debug("Found root config %o in $o.", BABEL_CONFIG_JS_FILENAME, dirname);
|
||||
}
|
||||
|
||||
return conf;
|
||||
}
|
||||
|
||||
function loadConfig(name, dirname, envName, caller) {
|
||||
const filepath = _resolve().default.sync(name, {
|
||||
basedir: dirname
|
||||
});
|
||||
|
||||
const conf = readConfig(filepath, envName, caller);
|
||||
|
||||
if (!conf) {
|
||||
throw new Error(`Config file ${filepath} contains no configuration data`);
|
||||
}
|
||||
|
||||
debug("Loaded config %o from $o.", name, dirname);
|
||||
return conf;
|
||||
}
|
||||
|
||||
function readConfig(filepath, envName, caller) {
|
||||
return _path().default.extname(filepath) === ".js" ? readConfigJS(filepath, {
|
||||
envName,
|
||||
caller
|
||||
}) : readConfigJSON5(filepath);
|
||||
}
|
||||
|
||||
const LOADING_CONFIGS = new Set();
|
||||
const readConfigJS = (0, _caching.makeStrongCache)((filepath, cache) => {
|
||||
if (!_fs().default.existsSync(filepath)) {
|
||||
cache.forever();
|
||||
return null;
|
||||
}
|
||||
|
||||
if (LOADING_CONFIGS.has(filepath)) {
|
||||
cache.never();
|
||||
debug("Auto-ignoring usage of config %o.", filepath);
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().default.dirname(filepath),
|
||||
options: {}
|
||||
};
|
||||
}
|
||||
|
||||
let options;
|
||||
|
||||
try {
|
||||
LOADING_CONFIGS.add(filepath);
|
||||
|
||||
const configModule = require(filepath);
|
||||
|
||||
options = configModule && configModule.__esModule ? configModule.default || undefined : configModule;
|
||||
} catch (err) {
|
||||
err.message = `${filepath}: Error while loading config - ${err.message}`;
|
||||
throw err;
|
||||
} finally {
|
||||
LOADING_CONFIGS.delete(filepath);
|
||||
}
|
||||
|
||||
if (typeof options === "function") {
|
||||
options = options((0, _configApi.default)(cache));
|
||||
if (!cache.configured()) throwConfigError();
|
||||
}
|
||||
|
||||
if (!options || typeof options !== "object" || Array.isArray(options)) {
|
||||
throw new Error(`${filepath}: Configuration should be an exported JavaScript object.`);
|
||||
}
|
||||
|
||||
if (typeof options.then === "function") {
|
||||
throw new Error(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`);
|
||||
}
|
||||
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().default.dirname(filepath),
|
||||
options
|
||||
};
|
||||
});
|
||||
const packageToBabelConfig = (0, _caching.makeWeakCache)(file => {
|
||||
const babel = file.options["babel"];
|
||||
if (typeof babel === "undefined") return null;
|
||||
|
||||
if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
|
||||
throw new Error(`${file.filepath}: .babel property must be an object`);
|
||||
}
|
||||
|
||||
return {
|
||||
filepath: file.filepath,
|
||||
dirname: file.dirname,
|
||||
options: babel
|
||||
};
|
||||
});
|
||||
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
|
||||
let options;
|
||||
|
||||
try {
|
||||
options = _json().default.parse(content);
|
||||
} catch (err) {
|
||||
err.message = `${filepath}: Error while parsing config - ${err.message}`;
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!options) throw new Error(`${filepath}: No config detected`);
|
||||
|
||||
if (typeof options !== "object") {
|
||||
throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
|
||||
}
|
||||
|
||||
if (Array.isArray(options)) {
|
||||
throw new Error(`${filepath}: Expected config object but found array`);
|
||||
}
|
||||
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().default.dirname(filepath),
|
||||
options
|
||||
};
|
||||
});
|
||||
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
|
||||
const ignoreDir = _path().default.dirname(filepath);
|
||||
|
||||
const ignorePatterns = content.split("\n").map(line => line.replace(/#(.*?)$/, "").trim()).filter(line => !!line);
|
||||
|
||||
for (const pattern of ignorePatterns) {
|
||||
if (pattern[0] === "!") {
|
||||
throw new Error(`Negation of file paths is not supported.`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().default.dirname(filepath),
|
||||
ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
|
||||
};
|
||||
});
|
||||
|
||||
function throwConfigError() {
|
||||
throw new Error(`\
|
||||
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
|
||||
for various types of caching, using the first param of their handler functions:
|
||||
|
||||
module.exports = function(api) {
|
||||
// The API exposes the following:
|
||||
|
||||
// Cache the returned value forever and don't call this function again.
|
||||
api.cache(true);
|
||||
|
||||
// Don't cache at all. Not recommended because it will be very slow.
|
||||
api.cache(false);
|
||||
|
||||
// Cached based on the value of some function. If this function returns a value different from
|
||||
// a previously-encountered value, the plugins will re-evaluate.
|
||||
var env = api.cache(() => process.env.NODE_ENV);
|
||||
|
||||
// If testing for a specific env, we recommend specifics to avoid instantiating a plugin for
|
||||
// any possible NODE_ENV value that might come up during plugin execution.
|
||||
var isProd = api.cache(() => process.env.NODE_ENV === "production");
|
||||
|
||||
// .cache(fn) will perform a linear search though instances to find the matching plugin based
|
||||
// based on previous instantiated plugins. If you want to recreate the plugin and discard the
|
||||
// previous instance whenever something changes, you may use:
|
||||
var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");
|
||||
|
||||
// Note, we also expose the following more-verbose versions of the above examples:
|
||||
api.cache.forever(); // api.cache(true)
|
||||
api.cache.never(); // api.cache(false)
|
||||
api.cache.using(fn); // api.cache(fn)
|
||||
|
||||
// Return the value that will be cached.
|
||||
return { };
|
||||
};`);
|
||||
}
|
@ -1,59 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.findConfigUpwards = findConfigUpwards;
exports.findPackageData = findPackageData;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
exports.resolvePlugin = resolvePlugin;
exports.resolvePreset = resolvePreset;
exports.loadPlugin = loadPlugin;
exports.loadPreset = loadPreset;

function findConfigUpwards(rootDir) {
  return null;
}

function findPackageData(filepath) {
  return {
    filepath,
    directories: [],
    pkg: null,
    isPackage: false
  };
}

function findRelativeConfig(pkgData, envName, caller) {
  return {
    pkg: null,
    config: null,
    ignore: null
  };
}

function findRootConfig(dirname, envName, caller) {
  return null;
}

function loadConfig(name, dirname, envName, caller) {
  throw new Error(`Cannot load ${name} relative to ${dirname} in a browser`);
}

function resolvePlugin(name, dirname) {
  return null;
}

function resolvePreset(name, dirname) {
  return null;
}

function loadPlugin(name, dirname) {
  throw new Error(`Cannot load plugin ${name} relative to ${dirname} in a browser`);
}

function loadPreset(name, dirname) {
  throw new Error(`Cannot load preset ${name} relative to ${dirname} in a browser`);
}
67
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/files/index.js
generated
vendored
@ -1,67 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
Object.defineProperty(exports, "findPackageData", {
  enumerable: true,
  get: function () {
    return _package.findPackageData;
  }
});
Object.defineProperty(exports, "findConfigUpwards", {
  enumerable: true,
  get: function () {
    return _configuration.findConfigUpwards;
  }
});
Object.defineProperty(exports, "findRelativeConfig", {
  enumerable: true,
  get: function () {
    return _configuration.findRelativeConfig;
  }
});
Object.defineProperty(exports, "findRootConfig", {
  enumerable: true,
  get: function () {
    return _configuration.findRootConfig;
  }
});
Object.defineProperty(exports, "loadConfig", {
  enumerable: true,
  get: function () {
    return _configuration.loadConfig;
  }
});
Object.defineProperty(exports, "resolvePlugin", {
  enumerable: true,
  get: function () {
    return _plugins.resolvePlugin;
  }
});
Object.defineProperty(exports, "resolvePreset", {
  enumerable: true,
  get: function () {
    return _plugins.resolvePreset;
  }
});
Object.defineProperty(exports, "loadPlugin", {
  enumerable: true,
  get: function () {
    return _plugins.loadPlugin;
  }
});
Object.defineProperty(exports, "loadPreset", {
  enumerable: true,
  get: function () {
    return _plugins.loadPreset;
  }
});

var _package = require("./package");

var _configuration = require("./configuration");

var _plugins = require("./plugins");

({});
@ -1,76 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.findPackageData = findPackageData;

function _path() {
  const data = _interopRequireDefault(require("path"));

  _path = function () {
    return data;
  };

  return data;
}

var _utils = require("./utils");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const PACKAGE_FILENAME = "package.json";

function findPackageData(filepath) {
  let pkg = null;
  const directories = [];
  let isPackage = true;

  let dirname = _path().default.dirname(filepath);

  while (!pkg && _path().default.basename(dirname) !== "node_modules") {
    directories.push(dirname);
    pkg = readConfigPackage(_path().default.join(dirname, PACKAGE_FILENAME));

    const nextLoc = _path().default.dirname(dirname);

    if (dirname === nextLoc) {
      isPackage = false;
      break;
    }

    dirname = nextLoc;
  }

  return {
    filepath,
    directories,
    pkg,
    isPackage
  };
}

const readConfigPackage = (0, _utils.makeStaticFileCache)((filepath, content) => {
  let options;

  try {
    options = JSON.parse(content);
  } catch (err) {
    err.message = `${filepath}: Error while parsing JSON - ${err.message}`;
    throw err;
  }

  if (typeof options !== "object") {
    throw new Error(`${filepath}: Config returned typeof ${typeof options}`);
  }

  if (Array.isArray(options)) {
    throw new Error(`${filepath}: Expected config object but found array`);
  }

  return {
    filepath,
    dirname: _path().default.dirname(filepath),
    options
  };
});
169
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/files/plugins.js
generated
vendored
@ -1,169 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.resolvePlugin = resolvePlugin;
|
||||
exports.resolvePreset = resolvePreset;
|
||||
exports.loadPlugin = loadPlugin;
|
||||
exports.loadPreset = loadPreset;
|
||||
|
||||
function _debug() {
|
||||
const data = _interopRequireDefault(require("debug"));
|
||||
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _resolve() {
|
||||
const data = _interopRequireDefault(require("resolve"));
|
||||
|
||||
_resolve = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _path() {
|
||||
const data = _interopRequireDefault(require("path"));
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
const debug = (0, _debug().default)("babel:config:loading:files:plugins");
|
||||
const EXACT_RE = /^module:/;
|
||||
const BABEL_PLUGIN_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-plugin-)/;
|
||||
const BABEL_PRESET_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-preset-)/;
|
||||
const BABEL_PLUGIN_ORG_RE = /^(@babel\/)(?!plugin-|[^/]+\/)/;
|
||||
const BABEL_PRESET_ORG_RE = /^(@babel\/)(?!preset-|[^/]+\/)/;
|
||||
const OTHER_PLUGIN_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-plugin(?:-|\/|$)|[^/]+\/)/;
|
||||
const OTHER_PRESET_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-preset(?:-|\/|$)|[^/]+\/)/;
|
||||
const OTHER_ORG_DEFAULT_RE = /^(@(?!babel$)[^/]+)$/;
|
||||
|
||||
function resolvePlugin(name, dirname) {
|
||||
return resolveStandardizedName("plugin", name, dirname);
|
||||
}
|
||||
|
||||
function resolvePreset(name, dirname) {
|
||||
return resolveStandardizedName("preset", name, dirname);
|
||||
}
|
||||
|
||||
function loadPlugin(name, dirname) {
|
||||
const filepath = resolvePlugin(name, dirname);
|
||||
|
||||
if (!filepath) {
|
||||
throw new Error(`Plugin ${name} not found relative to ${dirname}`);
|
||||
}
|
||||
|
||||
const value = requireModule("plugin", filepath);
|
||||
debug("Loaded plugin %o from %o.", name, dirname);
|
||||
return {
|
||||
filepath,
|
||||
value
|
||||
};
|
||||
}
|
||||
|
||||
function loadPreset(name, dirname) {
|
||||
const filepath = resolvePreset(name, dirname);
|
||||
|
||||
if (!filepath) {
|
||||
throw new Error(`Preset ${name} not found relative to ${dirname}`);
|
||||
}
|
||||
|
||||
const value = requireModule("preset", filepath);
|
||||
debug("Loaded preset %o from %o.", name, dirname);
|
||||
return {
|
||||
filepath,
|
||||
value
|
||||
};
|
||||
}
|
||||
|
||||
function standardizeName(type, name) {
|
||||
if (_path().default.isAbsolute(name)) return name;
|
||||
const isPreset = type === "preset";
|
||||
return name.replace(isPreset ? BABEL_PRESET_PREFIX_RE : BABEL_PLUGIN_PREFIX_RE, `babel-${type}-`).replace(isPreset ? BABEL_PRESET_ORG_RE : BABEL_PLUGIN_ORG_RE, `$1${type}-`).replace(isPreset ? OTHER_PRESET_ORG_RE : OTHER_PLUGIN_ORG_RE, `$1babel-${type}-`).replace(OTHER_ORG_DEFAULT_RE, `$1/babel-${type}`).replace(EXACT_RE, "");
|
||||
}
|
||||
|
||||
function resolveStandardizedName(type, name, dirname = process.cwd()) {
|
||||
const standardizedName = standardizeName(type, name);
|
||||
|
||||
try {
|
||||
return _resolve().default.sync(standardizedName, {
|
||||
basedir: dirname
|
||||
});
|
||||
} catch (e) {
|
||||
if (e.code !== "MODULE_NOT_FOUND") throw e;
|
||||
|
||||
if (standardizedName !== name) {
|
||||
let resolvedOriginal = false;
|
||||
|
||||
try {
|
||||
_resolve().default.sync(name, {
|
||||
basedir: dirname
|
||||
});
|
||||
|
||||
resolvedOriginal = true;
|
||||
} catch (e2) {}
|
||||
|
||||
if (resolvedOriginal) {
|
||||
e.message += `\n- If you want to resolve "${name}", use "module:${name}"`;
|
||||
}
|
||||
}
|
||||
|
||||
let resolvedBabel = false;
|
||||
|
||||
try {
|
||||
_resolve().default.sync(standardizeName(type, "@babel/" + name), {
|
||||
basedir: dirname
|
||||
});
|
||||
|
||||
resolvedBabel = true;
|
||||
} catch (e2) {}
|
||||
|
||||
if (resolvedBabel) {
|
||||
e.message += `\n- Did you mean "@babel/${name}"?`;
|
||||
}
|
||||
|
||||
let resolvedOppositeType = false;
|
||||
const oppositeType = type === "preset" ? "plugin" : "preset";
|
||||
|
||||
try {
|
||||
_resolve().default.sync(standardizeName(oppositeType, name), {
|
||||
basedir: dirname
|
||||
});
|
||||
|
||||
resolvedOppositeType = true;
|
||||
} catch (e2) {}
|
||||
|
||||
if (resolvedOppositeType) {
|
||||
e.message += `\n- Did you accidentally pass a ${oppositeType} as a ${type}?`;
|
||||
}
|
||||
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
const LOADING_MODULES = new Set();
|
||||
|
||||
function requireModule(type, name) {
|
||||
if (LOADING_MODULES.has(name)) {
|
||||
throw new Error(`Reentrant ${type} detected trying to load "${name}". This module is not ignored ` + "and is trying to load itself while compiling itself, leading to a dependency cycle. " + 'We recommend adding it to your "ignore" list in your babelrc, or to a .babelignore.');
|
||||
}
|
||||
|
||||
try {
|
||||
LOADING_MODULES.add(name);
|
||||
return require(name);
|
||||
} finally {
|
||||
LOADING_MODULES.delete(name);
|
||||
}
|
||||
}
|
41
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/files/utils.js
generated
vendored
@ -1,41 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.makeStaticFileCache = makeStaticFileCache;

function _fs() {
  const data = _interopRequireDefault(require("fs"));

  _fs = function () {
    return data;
  };

  return data;
}

var _caching = require("../caching");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function makeStaticFileCache(fn) {
  return (0, _caching.makeStrongCache)((filepath, cache) => {
    if (cache.invalidate(() => fileMtime(filepath)) === null) {
      cache.forever();
      return null;
    }

    return fn(filepath, _fs().default.readFileSync(filepath, "utf8"));
  });
}

function fileMtime(filepath) {
  try {
    return +_fs().default.statSync(filepath).mtime;
  } catch (e) {
    if (e.code !== "ENOENT" && e.code !== "ENOTDIR") throw e;
  }

  return null;
}
258
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/full.js
generated
vendored
@ -1,258 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = loadFullConfig;
|
||||
|
||||
var _util = require("./util");
|
||||
|
||||
var context = _interopRequireWildcard(require("../index"));
|
||||
|
||||
var _plugin = _interopRequireDefault(require("./plugin"));
|
||||
|
||||
var _item = require("./item");
|
||||
|
||||
var _configChain = require("./config-chain");
|
||||
|
||||
function _traverse() {
|
||||
const data = _interopRequireDefault(require("@babel/traverse"));
|
||||
|
||||
_traverse = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _caching = require("./caching");
|
||||
|
||||
var _options = require("./validation/options");
|
||||
|
||||
var _plugins = require("./validation/plugins");
|
||||
|
||||
var _configApi = _interopRequireDefault(require("./helpers/config-api"));
|
||||
|
||||
var _partial = _interopRequireDefault(require("./partial"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
|
||||
|
||||
function loadFullConfig(inputOpts) {
|
||||
const result = (0, _partial.default)(inputOpts);
|
||||
|
||||
if (!result) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const {
|
||||
options,
|
||||
context
|
||||
} = result;
|
||||
const optionDefaults = {};
|
||||
const passes = [[]];
|
||||
|
||||
try {
|
||||
const {
|
||||
plugins,
|
||||
presets
|
||||
} = options;
|
||||
|
||||
if (!plugins || !presets) {
|
||||
throw new Error("Assertion failure - plugins and presets exist");
|
||||
}
|
||||
|
||||
const ignored = function recurseDescriptors(config, pass) {
|
||||
const plugins = config.plugins.reduce((acc, descriptor) => {
|
||||
if (descriptor.options !== false) {
|
||||
acc.push(loadPluginDescriptor(descriptor, context));
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
const presets = config.presets.reduce((acc, descriptor) => {
|
||||
if (descriptor.options !== false) {
|
||||
acc.push({
|
||||
preset: loadPresetDescriptor(descriptor, context),
|
||||
pass: descriptor.ownPass ? [] : pass
|
||||
});
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
if (presets.length > 0) {
|
||||
passes.splice(1, 0, ...presets.map(o => o.pass).filter(p => p !== pass));
|
||||
|
||||
for (const _ref of presets) {
|
||||
const {
|
||||
preset,
|
||||
pass
|
||||
} = _ref;
|
||||
if (!preset) return true;
|
||||
const ignored = recurseDescriptors({
|
||||
plugins: preset.plugins,
|
||||
presets: preset.presets
|
||||
}, pass);
|
||||
if (ignored) return true;
|
||||
preset.options.forEach(opts => {
|
||||
(0, _util.mergeOptions)(optionDefaults, opts);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (plugins.length > 0) {
|
||||
pass.unshift(...plugins);
|
||||
}
|
||||
}({
|
||||
plugins: plugins.map(item => {
|
||||
const desc = (0, _item.getItemDescriptor)(item);
|
||||
|
||||
if (!desc) {
|
||||
throw new Error("Assertion failure - must be config item");
|
||||
}
|
||||
|
||||
return desc;
|
||||
}),
|
||||
presets: presets.map(item => {
|
||||
const desc = (0, _item.getItemDescriptor)(item);
|
||||
|
||||
if (!desc) {
|
||||
throw new Error("Assertion failure - must be config item");
|
||||
}
|
||||
|
||||
return desc;
|
||||
})
|
||||
}, passes[0]);
|
||||
|
||||
if (ignored) return null;
|
||||
} catch (e) {
|
||||
if (!/^\[BABEL\]/.test(e.message)) {
|
||||
e.message = `[BABEL] ${context.filename || "unknown"}: ${e.message}`;
|
||||
}
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
const opts = optionDefaults;
|
||||
(0, _util.mergeOptions)(opts, options);
|
||||
opts.plugins = passes[0];
|
||||
opts.presets = passes.slice(1).filter(plugins => plugins.length > 0).map(plugins => ({
|
||||
plugins
|
||||
}));
|
||||
opts.passPerPreset = opts.presets.length > 0;
|
||||
return {
|
||||
options: opts,
|
||||
passes: passes
|
||||
};
|
||||
}
|
||||
|
||||
const loadDescriptor = (0, _caching.makeWeakCache)(({
|
||||
value,
|
||||
options,
|
||||
dirname,
|
||||
alias
|
||||
}, cache) => {
|
||||
if (options === false) throw new Error("Assertion failure");
|
||||
options = options || {};
|
||||
let item = value;
|
||||
|
||||
if (typeof value === "function") {
|
||||
const api = Object.assign({}, context, (0, _configApi.default)(cache));
|
||||
|
||||
try {
|
||||
item = value(api, options, dirname);
|
||||
} catch (e) {
|
||||
if (alias) {
|
||||
e.message += ` (While processing: ${JSON.stringify(alias)})`;
|
||||
}
|
||||
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
if (!item || typeof item !== "object") {
|
||||
throw new Error("Plugin/Preset did not return an object.");
|
||||
}
|
||||
|
||||
if (typeof item.then === "function") {
|
||||
throw new Error(`You appear to be using an async plugin, ` + `which your current version of Babel does not support.` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version.`);
|
||||
}
|
||||
|
||||
return {
|
||||
value: item,
|
||||
options,
|
||||
dirname,
|
||||
alias
|
||||
};
|
||||
});
|
||||
|
||||
function loadPluginDescriptor(descriptor, context) {
|
||||
if (descriptor.value instanceof _plugin.default) {
|
||||
if (descriptor.options) {
|
||||
throw new Error("Passed options to an existing Plugin instance will not work.");
|
||||
}
|
||||
|
||||
return descriptor.value;
|
||||
}
|
||||
|
||||
return instantiatePlugin(loadDescriptor(descriptor, context), context);
|
||||
}
|
||||
|
||||
const instantiatePlugin = (0, _caching.makeWeakCache)(({
|
||||
value,
|
||||
options,
|
||||
dirname,
|
||||
alias
|
||||
}, cache) => {
|
||||
const pluginObj = (0, _plugins.validatePluginObject)(value);
|
||||
const plugin = Object.assign({}, pluginObj);
|
||||
|
||||
if (plugin.visitor) {
|
||||
plugin.visitor = _traverse().default.explode(Object.assign({}, plugin.visitor));
|
||||
}
|
||||
|
||||
if (plugin.inherits) {
|
||||
const inheritsDescriptor = {
|
||||
name: undefined,
|
||||
alias: `${alias}$inherits`,
|
||||
value: plugin.inherits,
|
||||
options,
|
||||
dirname
|
||||
};
|
||||
const inherits = cache.invalidate(data => loadPluginDescriptor(inheritsDescriptor, data));
|
||||
plugin.pre = chain(inherits.pre, plugin.pre);
|
||||
plugin.post = chain(inherits.post, plugin.post);
|
||||
plugin.manipulateOptions = chain(inherits.manipulateOptions, plugin.manipulateOptions);
|
||||
plugin.visitor = _traverse().default.visitors.merge([inherits.visitor || {}, plugin.visitor || {}]);
|
||||
}
|
||||
|
||||
return new _plugin.default(plugin, options, alias);
|
||||
});
|
||||
|
||||
const loadPresetDescriptor = (descriptor, context) => {
|
||||
return (0, _configChain.buildPresetChain)(instantiatePreset(loadDescriptor(descriptor, context)), context);
|
||||
};
|
||||
|
||||
const instantiatePreset = (0, _caching.makeWeakCache)(({
|
||||
value,
|
||||
dirname,
|
||||
alias
|
||||
}) => {
|
||||
return {
|
||||
options: (0, _options.validate)("preset", value),
|
||||
alias,
|
||||
dirname
|
||||
};
|
||||
});
|
||||
|
||||
function chain(a, b) {
|
||||
const fns = [a, b].filter(Boolean);
|
||||
if (fns.length <= 1) return fns[0];
|
||||
return function (...args) {
|
||||
for (const fn of fns) {
|
||||
fn.apply(this, args);
|
||||
}
|
||||
};
|
||||
}
|
@ -1,86 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = makeAPI;
|
||||
|
||||
function _semver() {
|
||||
const data = _interopRequireDefault(require("semver"));
|
||||
|
||||
_semver = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _ = require("../../");
|
||||
|
||||
var _caching = require("../caching");
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
function makeAPI(cache) {
|
||||
const env = value => cache.using(data => {
|
||||
if (typeof value === "undefined") return data.envName;
|
||||
|
||||
if (typeof value === "function") {
|
||||
return (0, _caching.assertSimpleType)(value(data.envName));
|
||||
}
|
||||
|
||||
if (!Array.isArray(value)) value = [value];
|
||||
return value.some(entry => {
|
||||
if (typeof entry !== "string") {
|
||||
throw new Error("Unexpected non-string value");
|
||||
}
|
||||
|
||||
return entry === data.envName;
|
||||
});
|
||||
});
|
||||
|
||||
const caller = cb => cache.using(data => (0, _caching.assertSimpleType)(cb(data.caller)));
|
||||
|
||||
return {
|
||||
version: _.version,
|
||||
cache: cache.simple(),
|
||||
env,
|
||||
async: () => false,
|
||||
caller,
|
||||
assertVersion,
|
||||
tokTypes: undefined
|
||||
};
|
||||
}
|
||||
|
||||
function assertVersion(range) {
|
||||
if (typeof range === "number") {
|
||||
if (!Number.isInteger(range)) {
|
||||
throw new Error("Expected string or integer value.");
|
||||
}
|
||||
|
||||
range = `^${range}.0.0-0`;
|
||||
}
|
||||
|
||||
if (typeof range !== "string") {
|
||||
throw new Error("Expected string or integer value.");
|
||||
}
|
||||
|
||||
if (_semver().default.satisfies(_.version, range)) return;
|
||||
const limit = Error.stackTraceLimit;
|
||||
|
||||
if (typeof limit === "number" && limit < 25) {
|
||||
Error.stackTraceLimit = 25;
|
||||
}
|
||||
|
||||
const err = new Error(`Requires Babel "${range}", but was loaded with "${_.version}". ` + `If you are sure you have a compatible version of @babel/core, ` + `it is likely that something in your build process is loading the ` + `wrong version. Inspect the stack trace of this error to look for ` + `the first entry that doesn't mention "@babel/core" or "babel-core" ` + `to see what is calling Babel.`);
|
||||
|
||||
if (typeof limit === "number") {
|
||||
Error.stackTraceLimit = limit;
|
||||
}
|
||||
|
||||
throw Object.assign(err, {
|
||||
code: "BABEL_VERSION_UNSUPPORTED",
|
||||
version: _.version,
|
||||
range
|
||||
});
|
||||
}
|
@ -1,10 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getEnv = getEnv;

function getEnv(defaultValue = "development") {
  return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;
}
29
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/index.js
generated
vendored
@ -1,29 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.loadOptions = loadOptions;
|
||||
Object.defineProperty(exports, "default", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _full.default;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "loadPartialConfig", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _partial.loadPartialConfig;
|
||||
}
|
||||
});
|
||||
|
||||
var _full = _interopRequireDefault(require("./full"));
|
||||
|
||||
var _partial = require("./partial");
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
function loadOptions(opts) {
|
||||
const config = (0, _full.default)(opts);
|
||||
return config ? config.options : null;
|
||||
}
|
66
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/item.js
generated
vendored
@ -1,66 +0,0 @@
141
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/partial.js
generated
vendored
@ -1,141 +0,0 @@
@ -1,52 +0,0 @@
22
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/plugin.js
generated
vendored
@ -1,22 +0,0 @@
30
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/config/util.js
generated
vendored
@ -1,30 +0,0 @@
@ -1,268 +0,0 @@
@ -1,188 +0,0 @@
@ -1,55 +0,0 @@
@ -1,66 +0,0 @@
@ -1,240 +0,0 @@
@ -1,65 +0,0 @@
@ -1,143 +0,0 @@
54
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/transform-ast.js
generated
vendored
@ -1,54 +0,0 @@
@ -1,26 +0,0 @@
91
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/transform-file.js
generated
vendored
@ -1,91 +0,0 @@
52
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/transform.js
generated
vendored
@ -1,52 +0,0 @@
@ -1,67 +0,0 @@
@ -1,266 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;

function helpers() {
  const data = _interopRequireWildcard(require("@babel/helpers"));

  helpers = function () {
    return data;
  };

  return data;
}

function _traverse() {
  const data = _interopRequireWildcard(require("@babel/traverse"));

  _traverse = function () {
    return data;
  };

  return data;
}

function _codeFrame() {
  const data = require("@babel/code-frame");

  _codeFrame = function () {
    return data;
  };

  return data;
}

function t() {
  const data = _interopRequireWildcard(require("@babel/types"));

  t = function () {
    return data;
  };

  return data;
}

function _semver() {
  const data = _interopRequireDefault(require("semver"));

  _semver = function () {
    return data;
  };

  return data;
}

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }

const errorVisitor = {
  enter(path, state) {
    const loc = path.node.loc;

    if (loc) {
      state.loc = loc;
      path.stop();
    }
  }

};

class File {
  constructor(options, {
    code,
    ast,
    inputMap
  }) {
    this._map = new Map();
    this.declarations = {};
    this.path = null;
    this.ast = {};
    this.metadata = {};
    this.code = "";
    this.inputMap = null;
    this.hub = {
      file: this,
      getCode: () => this.code,
      getScope: () => this.scope,
      addHelper: this.addHelper.bind(this),
      buildError: this.buildCodeFrameError.bind(this)
    };
    this.opts = options;
    this.code = code;
    this.ast = ast;
    this.inputMap = inputMap;
    this.path = _traverse().NodePath.get({
      hub: this.hub,
      parentPath: null,
      parent: this.ast,
      container: this.ast,
      key: "program"
    }).setContext();
    this.scope = this.path.scope;
  }

  get shebang() {
    const {
      interpreter
    } = this.path.node;
    return interpreter ? interpreter.value : "";
  }

  set shebang(value) {
    if (value) {
      this.path.get("interpreter").replaceWith(t().interpreterDirective(value));
    } else {
      this.path.get("interpreter").remove();
    }
  }

  set(key, val) {
    if (key === "helpersNamespace") {
      throw new Error("Babel 7.0.0-beta.56 has dropped support for the 'helpersNamespace' utility." + "If you are using @babel/plugin-external-helpers you will need to use a newer " + "version than the one you currently have installed. " + "If you have your own implementation, you'll want to explore using 'helperGenerator' " + "alongside 'file.availableHelper()'.");
    }

    this._map.set(key, val);
  }

  get(key) {
    return this._map.get(key);
  }

  has(key) {
    return this._map.has(key);
  }

  getModuleName() {
    const {
      filename,
      filenameRelative = filename,
      moduleId,
      moduleIds = !!moduleId,
      getModuleId,
      sourceRoot: sourceRootTmp,
      moduleRoot = sourceRootTmp,
      sourceRoot = moduleRoot
    } = this.opts;
    if (!moduleIds) return null;

    if (moduleId != null && !getModuleId) {
      return moduleId;
    }

    let moduleName = moduleRoot != null ? moduleRoot + "/" : "";

    if (filenameRelative) {
      const sourceRootReplacer = sourceRoot != null ? new RegExp("^" + sourceRoot + "/?") : "";
      moduleName += filenameRelative.replace(sourceRootReplacer, "").replace(/\.(\w*?)$/, "");
    }

    moduleName = moduleName.replace(/\\/g, "/");

    if (getModuleId) {
return getModuleId(moduleName) || moduleName;
|
||||
} else {
|
||||
return moduleName;
|
||||
}
|
||||
}
|
||||
|
||||
addImport() {
|
||||
throw new Error("This API has been removed. If you're looking for this " + "functionality in Babel 7, you should import the " + "'@babel/helper-module-imports' module and use the functions exposed " + " from that module, such as 'addNamed' or 'addDefault'.");
|
||||
}
|
||||
|
||||
availableHelper(name, versionRange) {
|
||||
let minVersion;
|
||||
|
||||
try {
|
||||
minVersion = helpers().minVersion(name);
|
||||
} catch (err) {
|
||||
if (err.code !== "BABEL_HELPER_UNKNOWN") throw err;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof versionRange !== "string") return true;
|
||||
if (_semver().default.valid(versionRange)) versionRange = `^${versionRange}`;
|
||||
return !_semver().default.intersects(`<${minVersion}`, versionRange) && !_semver().default.intersects(`>=8.0.0`, versionRange);
|
||||
}
|
||||
|
||||
addHelper(name) {
|
||||
const declar = this.declarations[name];
|
||||
if (declar) return t().cloneNode(declar);
|
||||
const generator = this.get("helperGenerator");
|
||||
|
||||
if (generator) {
|
||||
const res = generator(name);
|
||||
if (res) return res;
|
||||
}
|
||||
|
||||
const uid = this.declarations[name] = this.scope.generateUidIdentifier(name);
|
||||
const dependencies = {};
|
||||
|
||||
for (const dep of helpers().getDependencies(name)) {
|
||||
dependencies[dep] = this.addHelper(dep);
|
||||
}
|
||||
|
||||
const {
|
||||
nodes,
|
||||
globals
|
||||
} = helpers().get(name, dep => dependencies[dep], uid, Object.keys(this.scope.getAllBindings()));
|
||||
globals.forEach(name => {
|
||||
if (this.path.scope.hasBinding(name, true)) {
|
||||
this.path.scope.rename(name);
|
||||
}
|
||||
});
|
||||
nodes.forEach(node => {
|
||||
node._compact = true;
|
||||
});
|
||||
this.path.unshiftContainer("body", nodes);
|
||||
this.path.get("body").forEach(path => {
|
||||
if (nodes.indexOf(path.node) === -1) return;
|
||||
if (path.isVariableDeclaration()) this.scope.registerDeclaration(path);
|
||||
});
|
||||
return uid;
|
||||
}
|
||||
|
||||
addTemplateObject() {
|
||||
throw new Error("This function has been moved into the template literal transform itself.");
|
||||
}
|
||||
|
||||
buildCodeFrameError(node, msg, Error = SyntaxError) {
|
||||
let loc = node && (node.loc || node._loc);
|
||||
msg = `${this.opts.filename}: ${msg}`;
|
||||
|
||||
if (!loc && node) {
|
||||
const state = {
|
||||
loc: null
|
||||
};
|
||||
(0, _traverse().default)(node, errorVisitor, this.scope, state);
|
||||
loc = state.loc;
|
||||
let txt = "This is an error on an internal node. Probably an internal error.";
|
||||
if (loc) txt += " Location has been estimated.";
|
||||
msg += ` (${txt})`;
|
||||
}
|
||||
|
||||
if (loc) {
|
||||
const {
|
||||
highlightCode = true
|
||||
} = this.opts;
|
||||
msg += "\n" + (0, _codeFrame().codeFrameColumns)(this.code, {
|
||||
start: {
|
||||
line: loc.start.line,
|
||||
column: loc.start.column + 1
|
||||
}
|
||||
}, {
|
||||
highlightCode
|
||||
});
|
||||
}
|
||||
|
||||
return new Error(msg);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
exports.default = File;
|
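
One detail worth calling out in the `File` class above is `availableHelper`, which decides whether a helper can be used for the runtime version range a plugin was given. A standalone sketch of that version test using the same `semver` calls (the minimum versions and ranges below are made-up inputs; in the real code `minVersion` comes from `helpers().minVersion(name)`):

```js
// Standalone sketch of the semver check in File#availableHelper above.
const semver = require("semver");

function helperIsAvailable(minVersion, versionRange) {
  // A bare version such as "7.4.0" is widened to "^7.4.0", as in the original code.
  if (semver.valid(versionRange)) versionRange = `^${versionRange}`;
  return (
    // the requested range must not admit anything older than the helper's minimum...
    !semver.intersects(`<${minVersion}`, versionRange) &&
    // ...and must not reach into Babel 8.
    !semver.intersects(">=8.0.0", versionRange)
  );
}

console.log(helperIsAvailable("7.0.0", "^7.4.0")); // true
console.log(helperIsAvailable("7.5.0", "^7.4.0")); // false: the range admits 7.4.x
```
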
@ -1,89 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = generateCode;
|
||||
|
||||
function _convertSourceMap() {
|
||||
const data = _interopRequireDefault(require("convert-source-map"));
|
||||
|
||||
_convertSourceMap = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _generator() {
|
||||
const data = _interopRequireDefault(require("@babel/generator"));
|
||||
|
||||
_generator = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _mergeMap = _interopRequireDefault(require("./merge-map"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
function generateCode(pluginPasses, file) {
|
||||
const {
|
||||
opts,
|
||||
ast,
|
||||
code,
|
||||
inputMap
|
||||
} = file;
|
||||
const results = [];
|
||||
|
||||
for (const plugins of pluginPasses) {
|
||||
for (const plugin of plugins) {
|
||||
const {
|
||||
generatorOverride
|
||||
} = plugin;
|
||||
|
||||
if (generatorOverride) {
|
||||
const result = generatorOverride(ast, opts.generatorOpts, code, _generator().default);
|
||||
if (result !== undefined) results.push(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let result;
|
||||
|
||||
if (results.length === 0) {
|
||||
result = (0, _generator().default)(ast, opts.generatorOpts, code);
|
||||
} else if (results.length === 1) {
|
||||
result = results[0];
|
||||
|
||||
if (typeof result.then === "function") {
|
||||
throw new Error(`You appear to be using an async codegen plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version.`);
|
||||
}
|
||||
} else {
|
||||
throw new Error("More than one plugin attempted to override codegen.");
|
||||
}
|
||||
|
||||
let {
|
||||
code: outputCode,
|
||||
map: outputMap
|
||||
} = result;
|
||||
|
||||
if (outputMap && inputMap) {
|
||||
outputMap = (0, _mergeMap.default)(inputMap.toObject(), outputMap);
|
||||
}
|
||||
|
||||
if (opts.sourceMaps === "inline" || opts.sourceMaps === "both") {
|
||||
outputCode += "\n" + _convertSourceMap().default.fromObject(outputMap).toComment();
|
||||
}
|
||||
|
||||
if (opts.sourceMaps === "inline") {
|
||||
outputMap = null;
|
||||
}
|
||||
|
||||
return {
|
||||
outputCode,
|
||||
outputMap
|
||||
};
|
||||
}
|
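
The `sourceMaps` handling at the end of `generateCode` is easiest to see through the public API: "inline" appends the map as a data-URI comment and drops the map object, while "both" appends the comment and still returns the map. A small sketch, assuming `@babel/core` is installed:

```js
// Sketch of the sourceMaps branches above, observed through babel.transformSync.
const { transformSync } = require("@babel/core");

const src = "let answer = 42;";

const inline = transformSync(src, { sourceMaps: "inline", configFile: false });
// "inline": a //# sourceMappingURL=data:... comment is appended and result.map is null.
console.log(inline.code.includes("sourceMappingURL=data:application/json")); // true
console.log(inline.map); // null

const both = transformSync(src, { sourceMaps: "both", configFile: false });
// "both": the comment is appended and the map object is also returned.
console.log(both.code.includes("sourceMappingURL=")); // true
console.log(both.map.version); // 3
```
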
@ -1,255 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = mergeSourceMap;
|
||||
|
||||
function _sourceMap() {
|
||||
const data = _interopRequireDefault(require("source-map"));
|
||||
|
||||
_sourceMap = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
function mergeSourceMap(inputMap, map) {
|
||||
const input = buildMappingData(inputMap);
|
||||
const output = buildMappingData(map);
|
||||
const mergedGenerator = new (_sourceMap().default.SourceMapGenerator)();
|
||||
|
||||
for (const _ref of input.sources) {
|
||||
const {
|
||||
source
|
||||
} = _ref;
|
||||
|
||||
if (typeof source.content === "string") {
|
||||
mergedGenerator.setSourceContent(source.path, source.content);
|
||||
}
|
||||
}
|
||||
|
||||
if (output.sources.length === 1) {
|
||||
const defaultSource = output.sources[0];
|
||||
const insertedMappings = new Map();
|
||||
eachInputGeneratedRange(input, (generated, original, source) => {
|
||||
eachOverlappingGeneratedOutputRange(defaultSource, generated, item => {
|
||||
const key = makeMappingKey(item);
|
||||
if (insertedMappings.has(key)) return;
|
||||
insertedMappings.set(key, item);
|
||||
mergedGenerator.addMapping({
|
||||
source: source.path,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.columnStart
|
||||
},
|
||||
generated: {
|
||||
line: item.line,
|
||||
column: item.columnStart
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
for (const item of insertedMappings.values()) {
|
||||
if (item.columnEnd === Infinity) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const clearItem = {
|
||||
line: item.line,
|
||||
columnStart: item.columnEnd
|
||||
};
|
||||
const key = makeMappingKey(clearItem);
|
||||
|
||||
if (insertedMappings.has(key)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
mergedGenerator.addMapping({
|
||||
generated: {
|
||||
line: clearItem.line,
|
||||
column: clearItem.columnStart
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const result = mergedGenerator.toJSON();
|
||||
|
||||
if (typeof input.sourceRoot === "string") {
|
||||
result.sourceRoot = input.sourceRoot;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function makeMappingKey(item) {
|
||||
return `${item.line}/${item.columnStart}`;
|
||||
}
|
||||
|
||||
function eachOverlappingGeneratedOutputRange(outputFile, inputGeneratedRange, callback) {
|
||||
const overlappingOriginal = filterApplicableOriginalRanges(outputFile, inputGeneratedRange);
|
||||
|
||||
for (const _ref2 of overlappingOriginal) {
|
||||
const {
|
||||
generated
|
||||
} = _ref2;
|
||||
|
||||
for (const item of generated) {
|
||||
callback(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function filterApplicableOriginalRanges({
|
||||
mappings
|
||||
}, {
|
||||
line,
|
||||
columnStart,
|
||||
columnEnd
|
||||
}) {
|
||||
return filterSortedArray(mappings, ({
|
||||
original: outOriginal
|
||||
}) => {
|
||||
if (line > outOriginal.line) return -1;
|
||||
if (line < outOriginal.line) return 1;
|
||||
if (columnStart >= outOriginal.columnEnd) return -1;
|
||||
if (columnEnd <= outOriginal.columnStart) return 1;
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
|
||||
function eachInputGeneratedRange(map, callback) {
|
||||
for (const _ref3 of map.sources) {
|
||||
const {
|
||||
source,
|
||||
mappings
|
||||
} = _ref3;
|
||||
|
||||
for (const _ref4 of mappings) {
|
||||
const {
|
||||
original,
|
||||
generated
|
||||
} = _ref4;
|
||||
|
||||
for (const item of generated) {
|
||||
callback(item, original, source);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function buildMappingData(map) {
|
||||
const consumer = new (_sourceMap().default.SourceMapConsumer)(Object.assign({}, map, {
|
||||
sourceRoot: null
|
||||
}));
|
||||
const sources = new Map();
|
||||
const mappings = new Map();
|
||||
let last = null;
|
||||
consumer.computeColumnSpans();
|
||||
consumer.eachMapping(m => {
|
||||
if (m.originalLine === null) return;
|
||||
let source = sources.get(m.source);
|
||||
|
||||
if (!source) {
|
||||
source = {
|
||||
path: m.source,
|
||||
content: consumer.sourceContentFor(m.source, true)
|
||||
};
|
||||
sources.set(m.source, source);
|
||||
}
|
||||
|
||||
let sourceData = mappings.get(source);
|
||||
|
||||
if (!sourceData) {
|
||||
sourceData = {
|
||||
source,
|
||||
mappings: []
|
||||
};
|
||||
mappings.set(source, sourceData);
|
||||
}
|
||||
|
||||
const obj = {
|
||||
line: m.originalLine,
|
||||
columnStart: m.originalColumn,
|
||||
columnEnd: Infinity,
|
||||
name: m.name
|
||||
};
|
||||
|
||||
if (last && last.source === source && last.mapping.line === m.originalLine) {
|
||||
last.mapping.columnEnd = m.originalColumn;
|
||||
}
|
||||
|
||||
last = {
|
||||
source,
|
||||
mapping: obj
|
||||
};
|
||||
sourceData.mappings.push({
|
||||
original: obj,
|
||||
generated: consumer.allGeneratedPositionsFor({
|
||||
source: m.source,
|
||||
line: m.originalLine,
|
||||
column: m.originalColumn
|
||||
}).map(item => ({
|
||||
line: item.line,
|
||||
columnStart: item.column,
|
||||
columnEnd: item.lastColumn + 1
|
||||
}))
|
||||
});
|
||||
}, null, _sourceMap().default.SourceMapConsumer.ORIGINAL_ORDER);
|
||||
return {
|
||||
file: map.file,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: Array.from(mappings.values())
|
||||
};
|
||||
}
|
||||
|
||||
function findInsertionLocation(array, callback) {
|
||||
let left = 0;
|
||||
let right = array.length;
|
||||
|
||||
while (left < right) {
|
||||
const mid = Math.floor((left + right) / 2);
|
||||
const item = array[mid];
|
||||
const result = callback(item);
|
||||
|
||||
if (result === 0) {
|
||||
left = mid;
|
||||
break;
|
||||
}
|
||||
|
||||
if (result >= 0) {
|
||||
right = mid;
|
||||
} else {
|
||||
left = mid + 1;
|
||||
}
|
||||
}
|
||||
|
||||
let i = left;
|
||||
|
||||
if (i < array.length) {
|
||||
while (i >= 0 && callback(array[i]) >= 0) {
|
||||
i--;
|
||||
}
|
||||
|
||||
return i + 1;
|
||||
}
|
||||
|
||||
return i;
|
||||
}
|
||||
|
||||
function filterSortedArray(array, callback) {
|
||||
const start = findInsertionLocation(array, callback);
|
||||
const results = [];
|
||||
|
||||
for (let i = start; i < array.length && callback(array[i]) === 0; i++) {
|
||||
results.push(array[i]);
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
106
GenFlightRec/.scannerwork/css-bundle/node_modules/@babel/core/lib/transformation/index.js
generated
vendored
@ -1,106 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.runAsync = runAsync;
|
||||
exports.runSync = runSync;
|
||||
|
||||
function _traverse() {
|
||||
const data = _interopRequireDefault(require("@babel/traverse"));
|
||||
|
||||
_traverse = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _pluginPass = _interopRequireDefault(require("./plugin-pass"));
|
||||
|
||||
var _blockHoistPlugin = _interopRequireDefault(require("./block-hoist-plugin"));
|
||||
|
||||
var _normalizeOpts = _interopRequireDefault(require("./normalize-opts"));
|
||||
|
||||
var _normalizeFile = _interopRequireDefault(require("./normalize-file"));
|
||||
|
||||
var _generate = _interopRequireDefault(require("./file/generate"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
function runAsync(config, code, ast, callback) {
|
||||
let result;
|
||||
|
||||
try {
|
||||
result = runSync(config, code, ast);
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
return callback(null, result);
|
||||
}
|
||||
|
||||
function runSync(config, code, ast) {
|
||||
const file = (0, _normalizeFile.default)(config.passes, (0, _normalizeOpts.default)(config), code, ast);
|
||||
transformFile(file, config.passes);
|
||||
const opts = file.opts;
|
||||
const {
|
||||
outputCode,
|
||||
outputMap
|
||||
} = opts.code !== false ? (0, _generate.default)(config.passes, file) : {};
|
||||
return {
|
||||
metadata: file.metadata,
|
||||
options: opts,
|
||||
ast: opts.ast === true ? file.ast : null,
|
||||
code: outputCode === undefined ? null : outputCode,
|
||||
map: outputMap === undefined ? null : outputMap,
|
||||
sourceType: file.ast.program.sourceType
|
||||
};
|
||||
}
|
||||
|
||||
function transformFile(file, pluginPasses) {
|
||||
for (const pluginPairs of pluginPasses) {
|
||||
const passPairs = [];
|
||||
const passes = [];
|
||||
const visitors = [];
|
||||
|
||||
for (const plugin of pluginPairs.concat([(0, _blockHoistPlugin.default)()])) {
|
||||
const pass = new _pluginPass.default(file, plugin.key, plugin.options);
|
||||
passPairs.push([plugin, pass]);
|
||||
passes.push(pass);
|
||||
visitors.push(plugin.visitor);
|
||||
}
|
||||
|
||||
for (const [plugin, pass] of passPairs) {
|
||||
const fn = plugin.pre;
|
||||
|
||||
if (fn) {
|
||||
const result = fn.call(pass, file);
|
||||
|
||||
if (isThenable(result)) {
|
||||
throw new Error(`You appear to be using a plugin with an async .pre, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, you may need to upgrade ` + `your @babel/core version.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const visitor = _traverse().default.visitors.merge(visitors, passes, file.opts.wrapPluginVisitorMethod);
|
||||
|
||||
(0, _traverse().default)(file.ast, visitor, file.scope);
|
||||
|
||||
for (const [plugin, pass] of passPairs) {
|
||||
const fn = plugin.post;
|
||||
|
||||
if (fn) {
|
||||
const result = fn.call(pass, file);
|
||||
|
||||
if (isThenable(result)) {
|
||||
throw new Error(`You appear to be using a plugin with an async .post, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, you may need to upgrade ` + `your @babel/core version.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isThenable(val) {
|
||||
return !!val && (typeof val === "object" || typeof val === "function") && !!val.then && typeof val.then === "function";
|
||||
}
|
@ -1,211 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = normalizeFile;
|
||||
|
||||
function _path() {
|
||||
const data = _interopRequireDefault(require("path"));
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _debug() {
|
||||
const data = _interopRequireDefault(require("debug"));
|
||||
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _cloneDeep() {
|
||||
const data = _interopRequireDefault(require("lodash/cloneDeep"));
|
||||
|
||||
_cloneDeep = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function t() {
|
||||
const data = _interopRequireWildcard(require("@babel/types"));
|
||||
|
||||
t = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _convertSourceMap() {
|
||||
const data = _interopRequireDefault(require("convert-source-map"));
|
||||
|
||||
_convertSourceMap = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _parser() {
|
||||
const data = require("@babel/parser");
|
||||
|
||||
_parser = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _codeFrame() {
|
||||
const data = require("@babel/code-frame");
|
||||
|
||||
_codeFrame = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
var _file = _interopRequireDefault(require("./file/file"));
|
||||
|
||||
var _missingPluginHelper = _interopRequireDefault(require("./util/missing-plugin-helper"));
|
||||
|
||||
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
const debug = (0, _debug().default)("babel:transform:file");
|
||||
|
||||
function normalizeFile(pluginPasses, options, code, ast) {
|
||||
code = `${code || ""}`;
|
||||
let inputMap = null;
|
||||
|
||||
if (options.inputSourceMap !== false) {
|
||||
if (typeof options.inputSourceMap === "object") {
|
||||
inputMap = _convertSourceMap().default.fromObject(options.inputSourceMap);
|
||||
}
|
||||
|
||||
if (!inputMap) {
|
||||
try {
|
||||
inputMap = _convertSourceMap().default.fromSource(code);
|
||||
|
||||
if (inputMap) {
|
||||
code = _convertSourceMap().default.removeComments(code);
|
||||
}
|
||||
} catch (err) {
|
||||
debug("discarding unknown inline input sourcemap", err);
|
||||
code = _convertSourceMap().default.removeComments(code);
|
||||
}
|
||||
}
|
||||
|
||||
if (!inputMap) {
|
||||
if (typeof options.filename === "string") {
|
||||
try {
|
||||
inputMap = _convertSourceMap().default.fromMapFileSource(code, _path().default.dirname(options.filename));
|
||||
|
||||
if (inputMap) {
|
||||
code = _convertSourceMap().default.removeMapFileComments(code);
|
||||
}
|
||||
} catch (err) {
|
||||
debug("discarding unknown file input sourcemap", err);
|
||||
code = _convertSourceMap().default.removeMapFileComments(code);
|
||||
}
|
||||
} else {
|
||||
debug("discarding un-loadable file input sourcemap");
|
||||
code = _convertSourceMap().default.removeMapFileComments(code);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ast) {
|
||||
if (ast.type === "Program") {
|
||||
ast = t().file(ast, [], []);
|
||||
} else if (ast.type !== "File") {
|
||||
throw new Error("AST root must be a Program or File node");
|
||||
}
|
||||
|
||||
ast = (0, _cloneDeep().default)(ast);
|
||||
} else {
|
||||
ast = parser(pluginPasses, options, code);
|
||||
}
|
||||
|
||||
return new _file.default(options, {
|
||||
code,
|
||||
ast,
|
||||
inputMap
|
||||
});
|
||||
}
|
||||
|
||||
function parser(pluginPasses, {
|
||||
parserOpts,
|
||||
highlightCode = true,
|
||||
filename = "unknown"
|
||||
}, code) {
|
||||
try {
|
||||
const results = [];
|
||||
|
||||
for (const plugins of pluginPasses) {
|
||||
for (const plugin of plugins) {
|
||||
const {
|
||||
parserOverride
|
||||
} = plugin;
|
||||
|
||||
if (parserOverride) {
|
||||
const ast = parserOverride(code, parserOpts, _parser().parse);
|
||||
if (ast !== undefined) results.push(ast);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (results.length === 0) {
|
||||
return (0, _parser().parse)(code, parserOpts);
|
||||
} else if (results.length === 1) {
|
||||
if (typeof results[0].then === "function") {
|
||||
throw new Error(`You appear to be using an async parser plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, you may need to upgrade ` + `your @babel/core version.`);
|
||||
}
|
||||
|
||||
return results[0];
|
||||
}
|
||||
|
||||
throw new Error("More than one plugin attempted to override parsing.");
|
||||
} catch (err) {
|
||||
if (err.code === "BABEL_PARSER_SOURCETYPE_MODULE_REQUIRED") {
|
||||
err.message += "\nConsider renaming the file to '.mjs', or setting sourceType:module " + "or sourceType:unambiguous in your Babel config for this file.";
|
||||
}
|
||||
|
||||
const {
|
||||
loc,
|
||||
missingPlugin
|
||||
} = err;
|
||||
|
||||
if (loc) {
|
||||
const codeFrame = (0, _codeFrame().codeFrameColumns)(code, {
|
||||
start: {
|
||||
line: loc.line,
|
||||
column: loc.column + 1
|
||||
}
|
||||
}, {
|
||||
highlightCode
|
||||
});
|
||||
|
||||
if (missingPlugin) {
|
||||
err.message = `${filename}: ` + (0, _missingPluginHelper.default)(missingPlugin[0], loc, codeFrame);
|
||||
} else {
|
||||
err.message = `${filename}: ${err.message}\n\n` + codeFrame;
|
||||
}
|
||||
|
||||
err.code = "BABEL_PARSE_ERROR";
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
@ -1,65 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = normalizeOptions;
|
||||
|
||||
function _path() {
|
||||
const data = _interopRequireDefault(require("path"));
|
||||
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
function normalizeOptions(config) {
|
||||
const {
|
||||
filename,
|
||||
cwd,
|
||||
filenameRelative = typeof filename === "string" ? _path().default.relative(cwd, filename) : "unknown",
|
||||
sourceType = "module",
|
||||
inputSourceMap,
|
||||
sourceMaps = !!inputSourceMap,
|
||||
moduleRoot,
|
||||
sourceRoot = moduleRoot,
|
||||
sourceFileName = _path().default.basename(filenameRelative),
|
||||
comments = true,
|
||||
compact = "auto"
|
||||
} = config.options;
|
||||
const opts = config.options;
|
||||
const options = Object.assign({}, opts, {
|
||||
parserOpts: Object.assign({
|
||||
sourceType: _path().default.extname(filenameRelative) === ".mjs" ? "module" : sourceType,
|
||||
sourceFileName: filename,
|
||||
plugins: []
|
||||
}, opts.parserOpts),
|
||||
generatorOpts: Object.assign({
|
||||
filename,
|
||||
auxiliaryCommentBefore: opts.auxiliaryCommentBefore,
|
||||
auxiliaryCommentAfter: opts.auxiliaryCommentAfter,
|
||||
retainLines: opts.retainLines,
|
||||
comments,
|
||||
shouldPrintComment: opts.shouldPrintComment,
|
||||
compact,
|
||||
minified: opts.minified,
|
||||
sourceMaps,
|
||||
sourceRoot,
|
||||
sourceFileName
|
||||
}, opts.generatorOpts)
|
||||
});
|
||||
|
||||
for (const plugins of config.passes) {
|
||||
for (const plugin of plugins) {
|
||||
if (plugin.manipulateOptions) {
|
||||
plugin.manipulateOptions(options, options.parserOpts);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
@ -1,48 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = void 0;
|
||||
|
||||
class PluginPass {
|
||||
constructor(file, key, options) {
|
||||
this._map = new Map();
|
||||
this.key = key;
|
||||
this.file = file;
|
||||
this.opts = options || {};
|
||||
this.cwd = file.opts.cwd;
|
||||
this.filename = file.opts.filename;
|
||||
}
|
||||
|
||||
set(key, val) {
|
||||
this._map.set(key, val);
|
||||
}
|
||||
|
||||
get(key) {
|
||||
return this._map.get(key);
|
||||
}
|
||||
|
||||
availableHelper(name, versionRange) {
|
||||
return this.file.availableHelper(name, versionRange);
|
||||
}
|
||||
|
||||
addHelper(name) {
|
||||
return this.file.addHelper(name);
|
||||
}
|
||||
|
||||
addImport() {
|
||||
return this.file.addImport();
|
||||
}
|
||||
|
||||
getModuleName() {
|
||||
return this.file.getModuleName();
|
||||
}
|
||||
|
||||
buildCodeFrameError(node, msg, Error) {
|
||||
return this.file.buildCodeFrameError(node, msg, Error);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
exports.default = PluginPass;
|
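
`PluginPass` is the per-file `this` value a plugin sees in its `pre`, `post`, and visitor functions, which is how `opts`, `filename`, and the helper methods of the `File` above are exposed to plugin code. A sketch of an illustrative plugin relying on that (the plugin itself is invented, not part of @babel/core):

```js
// Sketch: inside visitor methods written as regular functions, `this` is the PluginPass.
module.exports = function exampleLoggingPlugin() {
  return {
    name: "example-logging-plugin",
    visitor: {
      Identifier(path) {
        // `this.opts` are the options given next to the plugin in the Babel config;
        // `this.filename` comes from file.opts.filename (see the constructor above).
        if (this.opts.verbose) {
          console.log(`${this.filename}: identifier ${path.node.name}`);
        }
      },
    },
  };
};
```

It would be registered in a Babel config as `plugins: [["./example-logging-plugin", { verbose: true }]]`.
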
@ -1,239 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = generateMissingPluginMessage;
|
||||
const pluginNameMap = {
|
||||
classProperties: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-class-properties",
|
||||
url: "https://git.io/vb4yQ"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-class-properties",
|
||||
url: "https://git.io/vb4SL"
|
||||
}
|
||||
},
|
||||
decorators: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-decorators",
|
||||
url: "https://git.io/vb4y9"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-decorators",
|
||||
url: "https://git.io/vb4ST"
|
||||
}
|
||||
},
|
||||
doExpressions: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-do-expressions",
|
||||
url: "https://git.io/vb4yh"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-do-expressions",
|
||||
url: "https://git.io/vb4S3"
|
||||
}
|
||||
},
|
||||
dynamicImport: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-dynamic-import",
|
||||
url: "https://git.io/vb4Sv"
|
||||
}
|
||||
},
|
||||
exportDefaultFrom: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-export-default-from",
|
||||
url: "https://git.io/vb4SO"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-export-default-from",
|
||||
url: "https://git.io/vb4yH"
|
||||
}
|
||||
},
|
||||
exportNamespaceFrom: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-export-namespace-from",
|
||||
url: "https://git.io/vb4Sf"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-export-namespace-from",
|
||||
url: "https://git.io/vb4SG"
|
||||
}
|
||||
},
|
||||
flow: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-flow",
|
||||
url: "https://git.io/vb4yb"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-transform-flow-strip-types",
|
||||
url: "https://git.io/vb49g"
|
||||
}
|
||||
},
|
||||
functionBind: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-function-bind",
|
||||
url: "https://git.io/vb4y7"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-function-bind",
|
||||
url: "https://git.io/vb4St"
|
||||
}
|
||||
},
|
||||
functionSent: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-function-sent",
|
||||
url: "https://git.io/vb4yN"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-function-sent",
|
||||
url: "https://git.io/vb4SZ"
|
||||
}
|
||||
},
|
||||
importMeta: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-import-meta",
|
||||
url: "https://git.io/vbKK6"
|
||||
}
|
||||
},
|
||||
jsx: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-jsx",
|
||||
url: "https://git.io/vb4yA"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-transform-react-jsx",
|
||||
url: "https://git.io/vb4yd"
|
||||
}
|
||||
},
|
||||
logicalAssignment: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-logical-assignment-operators",
|
||||
url: "https://git.io/vAlBp"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-logical-assignment-operators",
|
||||
url: "https://git.io/vAlRe"
|
||||
}
|
||||
},
|
||||
nullishCoalescingOperator: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-nullish-coalescing-operator",
|
||||
url: "https://git.io/vb4yx"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-nullish-coalescing-operator",
|
||||
url: "https://git.io/vb4Se"
|
||||
}
|
||||
},
|
||||
numericSeparator: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-numeric-separator",
|
||||
url: "https://git.io/vb4Sq"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-numeric-separator",
|
||||
url: "https://git.io/vb4yS"
|
||||
}
|
||||
},
|
||||
optionalChaining: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-optional-chaining",
|
||||
url: "https://git.io/vb4Sc"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-optional-chaining",
|
||||
url: "https://git.io/vb4Sk"
|
||||
}
|
||||
},
|
||||
pipelineOperator: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-pipeline-operator",
|
||||
url: "https://git.io/vb4yj"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-pipeline-operator",
|
||||
url: "https://git.io/vb4SU"
|
||||
}
|
||||
},
|
||||
throwExpressions: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-throw-expressions",
|
||||
url: "https://git.io/vb4SJ"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-throw-expressions",
|
||||
url: "https://git.io/vb4yF"
|
||||
}
|
||||
},
|
||||
typescript: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-typescript",
|
||||
url: "https://git.io/vb4SC"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-transform-typescript",
|
||||
url: "https://git.io/vb4Sm"
|
||||
}
|
||||
},
|
||||
asyncGenerators: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-async-generators",
|
||||
url: "https://git.io/vb4SY"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-async-generator-functions",
|
||||
url: "https://git.io/vb4yp"
|
||||
}
|
||||
},
|
||||
objectRestSpread: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-object-rest-spread",
|
||||
url: "https://git.io/vb4y5"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-object-rest-spread",
|
||||
url: "https://git.io/vb4Ss"
|
||||
}
|
||||
},
|
||||
optionalCatchBinding: {
|
||||
syntax: {
|
||||
name: "@babel/plugin-syntax-optional-catch-binding",
|
||||
url: "https://git.io/vb4Sn"
|
||||
},
|
||||
transform: {
|
||||
name: "@babel/plugin-proposal-optional-catch-binding",
|
||||
url: "https://git.io/vb4SI"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const getNameURLCombination = ({
|
||||
name,
|
||||
url
|
||||
}) => `${name} (${url})`;
|
||||
|
||||
function generateMissingPluginMessage(missingPluginName, loc, codeFrame) {
|
||||
let helpMessage = `Support for the experimental syntax '${missingPluginName}' isn't currently enabled ` + `(${loc.line}:${loc.column + 1}):\n\n` + codeFrame;
|
||||
const pluginInfo = pluginNameMap[missingPluginName];
|
||||
|
||||
if (pluginInfo) {
|
||||
const {
|
||||
syntax: syntaxPlugin,
|
||||
transform: transformPlugin
|
||||
} = pluginInfo;
|
||||
|
||||
if (syntaxPlugin) {
|
||||
if (transformPlugin) {
|
||||
const transformPluginInfo = getNameURLCombination(transformPlugin);
|
||||
helpMessage += `\n\nAdd ${transformPluginInfo} to the 'plugins' section of your Babel config ` + `to enable transformation.`;
|
||||
} else {
|
||||
const syntaxPluginInfo = getNameURLCombination(syntaxPlugin);
|
||||
helpMessage += `\n\nAdd ${syntaxPluginInfo} to the 'plugins' section of your Babel config ` + `to enable parsing.`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return helpMessage;
|
||||
}
|
@ -1,301 +0,0 @@
|
||||
# Change Log
|
||||
|
||||
## 0.5.6
|
||||
|
||||
* Fix for regression when people were using numbers as names in source maps. See
|
||||
#236.
|
||||
|
||||
## 0.5.5
|
||||
|
||||
* Fix "regression" of unsupported, implementation behavior that half the world
|
||||
happens to have come to depend on. See #235.
|
||||
|
||||
* Fix regression involving function hoisting in SpiderMonkey. See #233.
|
||||
|
||||
## 0.5.4
|
||||
|
||||
* Large performance improvements to source-map serialization. See #228 and #229.
|
||||
|
||||
## 0.5.3
|
||||
|
||||
* Do not include unnecessary distribution files. See
|
||||
commit ef7006f8d1647e0a83fdc60f04f5a7ca54886f86.
|
||||
|
||||
## 0.5.2
|
||||
|
||||
* Include browser distributions of the library in package.json's `files`. See
|
||||
issue #212.
|
||||
|
||||
## 0.5.1
|
||||
|
||||
* Fix latent bugs in IndexedSourceMapConsumer.prototype._parseMappings. See
|
||||
ff05274becc9e6e1295ed60f3ea090d31d843379.
|
||||
|
||||
## 0.5.0
|
||||
|
||||
* Node 0.8 is no longer supported.
|
||||
|
||||
* Use webpack instead of dryice for bundling.
|
||||
|
||||
* Big speedups serializing source maps. See pull request #203.
|
||||
|
||||
* Fix a bug with `SourceMapConsumer.prototype.sourceContentFor` and sources that
|
||||
explicitly start with the source root. See issue #199.
|
||||
|
||||
## 0.4.4
|
||||
|
||||
* Fix an issue where using a `SourceMapGenerator` after having created a
|
||||
`SourceMapConsumer` from it via `SourceMapConsumer.fromSourceMap` failed. See
|
||||
issue #191.
|
||||
|
||||
* Fix an issue with where `SourceMapGenerator` would mistakenly consider
|
||||
different mappings as duplicates of each other and avoid generating them. See
|
||||
issue #192.
|
||||
|
||||
## 0.4.3
|
||||
|
||||
* A very large number of performance improvements, particularly when parsing
|
||||
source maps. Collectively about 75% of time shaved off of the source map
|
||||
parsing benchmark!
|
||||
|
||||
* Fix a bug in `SourceMapConsumer.prototype.allGeneratedPositionsFor` and fuzzy
|
||||
searching in the presence of a column option. See issue #177.
|
||||
|
||||
* Fix a bug with joining a source and its source root when the source is above
|
||||
the root. See issue #182.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.hasContentsOfAllSources` method to
|
||||
determine when all sources' contents are inlined into the source map. See
|
||||
issue #190.
|
||||
|
||||
## 0.4.2
|
||||
|
||||
* Add an `.npmignore` file so that the benchmarks aren't pulled down by
|
||||
dependent projects. Issue #169.
|
||||
|
||||
* Add an optional `column` argument to
|
||||
`SourceMapConsumer.prototype.allGeneratedPositionsFor` and better handle lines
|
||||
with no mappings. Issues #172 and #173.
|
||||
|
||||
## 0.4.1
|
||||
|
||||
* Fix accidentally defining a global variable. #170.
|
||||
|
||||
## 0.4.0
|
||||
|
||||
* The default direction for fuzzy searching was changed back to its original
|
||||
direction. See #164.
|
||||
|
||||
* There is now a `bias` option you can supply to `SourceMapConsumer` to control
|
||||
the fuzzy searching direction. See #167.
|
||||
|
||||
* About an 8% speed up in parsing source maps. See #159.
|
||||
|
||||
* Added a benchmark for parsing and generating source maps.
|
||||
|
||||
## 0.3.0
|
||||
|
||||
* Change the default direction that searching for positions fuzzes when there is
|
||||
not an exact match. See #154.
|
||||
|
||||
* Support for environments using json2.js for JSON serialization. See #156.
|
||||
|
||||
## 0.2.0
|
||||
|
||||
* Support for consuming "indexed" source maps which do not have any remote
|
||||
sections. See pull request #127. This introduces a minor backwards
|
||||
incompatibility if you are monkey patching `SourceMapConsumer.prototype`
|
||||
methods.
|
||||
|
||||
## 0.1.43
|
||||
|
||||
* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue
|
||||
#148 for some discussion and issues #150, #151, and #152 for implementations.
|
||||
|
||||
## 0.1.42
|
||||
|
||||
* Fix an issue where `SourceNode`s from different versions of the source-map
|
||||
library couldn't be used in conjunction with each other. See issue #142.
|
||||
|
||||
## 0.1.41
|
||||
|
||||
* Fix a bug with getting the source content of relative sources with a "./"
|
||||
prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768).
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the
|
||||
column span of each mapping.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find
|
||||
all generated positions associated with a given original source and line.
|
||||
|
||||
## 0.1.40
|
||||
|
||||
* Performance improvements for parsing source maps in SourceMapConsumer.
|
||||
|
||||
## 0.1.39
|
||||
|
||||
* Fix a bug where setting a source's contents to null before any source content
  had been set threw a TypeError. See issue #131.
|
||||
|
||||
## 0.1.38
|
||||
|
||||
* Fix a bug where finding relative paths from an empty path were creating
|
||||
absolute paths. See issue #129.
|
||||
|
||||
## 0.1.37
|
||||
|
||||
* Fix a bug where if the source root was an empty string, relative source paths
|
||||
would turn into absolute source paths. Issue #124.
|
||||
|
||||
## 0.1.36
|
||||
|
||||
* Allow the `names` mapping property to be an empty string. Issue #121.
|
||||
|
||||
## 0.1.35
|
||||
|
||||
* A third optional parameter was added to `SourceNode.fromStringWithSourceMap`
|
||||
to specify a path that relative sources in the second parameter should be
|
||||
relative to. Issue #105.
|
||||
|
||||
* If no file property is given to a `SourceMapGenerator`, then the resulting
|
||||
source map will no longer have a `null` file property. The property will
|
||||
simply not exist. Issue #104.
|
||||
|
||||
* Fixed a bug where consecutive newlines were ignored in `SourceNode`s.
|
||||
Issue #116.
|
||||
|
||||
## 0.1.34
|
||||
|
||||
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
|
||||
|
||||
* Fix bug involving source contents and the
|
||||
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
|
||||
|
||||
## 0.1.33
|
||||
|
||||
* Fix some edge cases surrounding path joining and URL resolution.
|
||||
|
||||
* Add a third parameter for relative path to
|
||||
`SourceMapGenerator.prototype.applySourceMap`.
|
||||
|
||||
* Fix issues with mappings and EOLs.
|
||||
|
||||
## 0.1.32
|
||||
|
||||
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
|
||||
(issue 92).
|
||||
|
||||
* Fixed test runner to actually report number of failed tests as its process
|
||||
exit code.
|
||||
|
||||
* Fixed a typo when reporting bad mappings (issue 87).
|
||||
|
||||
## 0.1.31
|
||||
|
||||
* Delay parsing the mappings in SourceMapConsumer until queried for a source
|
||||
location.
|
||||
|
||||
* Support Sass source maps (which at the time of writing deviate from the spec
|
||||
in small ways) in SourceMapConsumer.
|
||||
|
||||
## 0.1.30
|
||||
|
||||
* Do not join source root with a source, when the source is a data URI.
|
||||
|
||||
* Extend the test runner to allow running single specific test files at a time.
|
||||
|
||||
* Performance improvements in `SourceNode.prototype.walk` and
|
||||
`SourceMapConsumer.prototype.eachMapping`.
|
||||
|
||||
* Source map browser builds will now work inside Workers.
|
||||
|
||||
* Better error messages when attempting to add an invalid mapping to a
|
||||
`SourceMapGenerator`.
|
||||
|
||||
## 0.1.29
|
||||
|
||||
* Allow duplicate entries in the `names` and `sources` arrays of source maps
|
||||
(usually from TypeScript) we are parsing. Fixes github issue 72.
|
||||
|
||||
## 0.1.28
|
||||
|
||||
* Skip duplicate mappings when creating source maps from SourceNode; github
|
||||
issue 75.
|
||||
|
||||
## 0.1.27
|
||||
|
||||
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
|
||||
we don't use it anyway.
|
||||
|
||||
## 0.1.26
|
||||
|
||||
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
|
||||
|
||||
## 0.1.25
|
||||
|
||||
* Make compatible with browserify
|
||||
|
||||
## 0.1.24
|
||||
|
||||
* Fix issue with absolute paths and `file://` URIs. See
|
||||
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
|
||||
|
||||
## 0.1.23
|
||||
|
||||
* Fix issue with absolute paths and sourcesContent, github issue 64.
|
||||
|
||||
## 0.1.22
|
||||
|
||||
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
|
||||
|
||||
## 0.1.21
|
||||
|
||||
* Fixed handling of sources that start with a slash so that they are relative to
|
||||
the source root's host.
|
||||
|
||||
## 0.1.20
|
||||
|
||||
* Fixed github issue #43: absolute URLs aren't joined with the source root
|
||||
anymore.
|
||||
|
||||
## 0.1.19
|
||||
|
||||
* Using Travis CI to run tests.
|
||||
|
||||
## 0.1.18
|
||||
|
||||
* Fixed a bug in the handling of sourceRoot.
|
||||
|
||||
## 0.1.17
|
||||
|
||||
* Added SourceNode.fromStringWithSourceMap.
|
||||
|
||||
## 0.1.16
|
||||
|
||||
* Added missing documentation.
|
||||
|
||||
* Fixed the generating of empty mappings in SourceNode.
|
||||
|
||||
## 0.1.15
|
||||
|
||||
* Added SourceMapGenerator.applySourceMap.
|
||||
|
||||
## 0.1.14
|
||||
|
||||
* The sourceRoot is now handled consistently.
|
||||
|
||||
## 0.1.13
|
||||
|
||||
* Added SourceMapGenerator.fromSourceMap.
|
||||
|
||||
## 0.1.12
|
||||
|
||||
* SourceNode now generates empty mappings too.
|
||||
|
||||
## 0.1.11
|
||||
|
||||
* Added name support to SourceNode.
|
||||
|
||||
## 0.1.10
|
||||
|
||||
* Added sourcesContent support to the consumer and generator.
|
@ -1,28 +0,0 @@
|
||||
|
||||
Copyright (c) 2009-2011, Mozilla Foundation and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the names of the Mozilla Foundation nor the names of project
|
||||
contributors may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@ -1,729 +0,0 @@
|
||||
# Source Map

[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map)

[![NPM](https://nodei.co/npm/source-map.png?downloads=true&downloadRank=true)](https://www.npmjs.com/package/source-map)

This is a library to generate and consume the source map format
[described here][format].

[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit

## Use with Node

    $ npm install source-map

## Use on the Web

    <script src="https://raw.githubusercontent.com/mozilla/source-map/master/dist/source-map.min.js" defer></script>

--------------------------------------------------------------------------------

<!-- `npm run toc` to regenerate the Table of Contents -->

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
|
||||
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
|
||||
## Table of Contents
|
||||
|
||||
- [Examples](#examples)
|
||||
- [Consuming a source map](#consuming-a-source-map)
|
||||
- [Generating a source map](#generating-a-source-map)
|
||||
- [With SourceNode (high level API)](#with-sourcenode-high-level-api)
|
||||
- [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api)
|
||||
- [API](#api)
|
||||
- [SourceMapConsumer](#sourcemapconsumer)
|
||||
- [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap)
|
||||
- [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans)
|
||||
- [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition)
|
||||
- [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition)
|
||||
- [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition)
|
||||
- [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources)
|
||||
- [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing)
|
||||
- [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order)
|
||||
- [SourceMapGenerator](#sourcemapgenerator)
|
||||
- [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap)
|
||||
- [SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer)
|
||||
- [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping)
|
||||
- [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent)
|
||||
- [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath)
|
||||
- [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring)
|
||||
- [SourceNode](#sourcenode)
|
||||
- [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name)
|
||||
- [SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath)
|
||||
- [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk)
|
||||
- [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk)
|
||||
- [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent)
|
||||
- [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn)
|
||||
- [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn)
|
||||
- [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep)
|
||||
- [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement)
|
||||
- [SourceNode.prototype.toString()](#sourcenodeprototypetostring)
|
||||
- [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap)
|
||||
|
||||
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
|
||||
|
||||
## Examples
|
||||
|
||||
### Consuming a source map
|
||||
|
||||
```js
|
||||
var rawSourceMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: 'http://example.com/www/js/',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
|
||||
var smc = new SourceMapConsumer(rawSourceMap);
|
||||
|
||||
console.log(smc.sources);
|
||||
// [ 'http://example.com/www/js/one.js',
|
||||
// 'http://example.com/www/js/two.js' ]
|
||||
|
||||
console.log(smc.originalPositionFor({
|
||||
line: 2,
|
||||
column: 28
|
||||
}));
|
||||
// { source: 'http://example.com/www/js/two.js',
|
||||
// line: 2,
|
||||
// column: 10,
|
||||
// name: 'n' }
|
||||
|
||||
console.log(smc.generatedPositionFor({
|
||||
source: 'http://example.com/www/js/two.js',
|
||||
line: 2,
|
||||
column: 10
|
||||
}));
|
||||
// { line: 2, column: 28 }
|
||||
|
||||
smc.eachMapping(function (m) {
|
||||
// ...
|
||||
});
|
||||
```
|
||||
|
||||
### Generating a source map
|
||||
|
||||
In depth guide:
|
||||
[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)
|
||||
|
||||
#### With SourceNode (high level API)
|
||||
|
||||
```js
|
||||
function compile(ast) {
|
||||
switch (ast.type) {
|
||||
case 'BinaryExpression':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
[compile(ast.left), " + ", compile(ast.right)]
|
||||
);
|
||||
case 'Literal':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
String(ast.value)
|
||||
);
|
||||
// ...
|
||||
default:
|
||||
throw new Error("Bad AST");
|
||||
}
|
||||
}
|
||||
|
||||
var ast = parse("40 + 2", "add.js");
|
||||
console.log(compile(ast).toStringWithSourceMap({
|
||||
file: 'add.js'
|
||||
}));
|
||||
// { code: '40 + 2',
|
||||
// map: [object SourceMapGenerator] }
|
||||
```
|
||||
|
||||
#### With SourceMapGenerator (low level API)
|
||||
|
||||
```js
|
||||
var map = new SourceMapGenerator({
|
||||
file: "source-mapped.js"
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: 10,
|
||||
column: 35
|
||||
},
|
||||
source: "foo.js",
|
||||
original: {
|
||||
line: 33,
|
||||
column: 2
|
||||
},
|
||||
name: "christopher"
|
||||
});
|
||||
|
||||
console.log(map.toString());
|
||||
// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}'
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
Get a reference to the module:
|
||||
|
||||
```js
|
||||
// Node.js
|
||||
var sourceMap = require('source-map');
|
||||
|
||||
// Browser builds
|
||||
var sourceMap = window.sourceMap;
|
||||
|
||||
// Inside Firefox
|
||||
const sourceMap = require("devtools/toolkit/sourcemap/source-map.js");
|
||||
```
|
||||
|
||||
### SourceMapConsumer

A SourceMapConsumer instance represents a parsed source map which we can query
for information about the original file positions by giving it a file position
in the generated source.

#### new SourceMapConsumer(rawSourceMap)

The only parameter is the raw source map (either as a string which can be
`JSON.parse`'d, or an object). According to the spec, source maps have the
following attributes:

* `version`: Which version of the source map spec this map is following.

* `sources`: An array of URLs to the original source files.

* `names`: An array of identifiers which can be referenced by individual
  mappings.

* `sourceRoot`: Optional. The URL root from which all sources are relative.

* `sourcesContent`: Optional. An array of contents of the original source files.

* `mappings`: A string of base64 VLQs which contain the actual mappings.

* `file`: Optional. The generated filename this source map is associated with.

```js
var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData);
```

#### SourceMapConsumer.prototype.computeColumnSpans()

Compute the last column for each generated mapping. The last column is
inclusive.

```js
// Before:
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
// [ { line: 2,
//     column: 1 },
//   { line: 2,
//     column: 10 },
//   { line: 2,
//     column: 20 } ]

consumer.computeColumnSpans();

// After:
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
// [ { line: 2,
//     column: 1,
//     lastColumn: 9 },
//   { line: 2,
//     column: 10,
//     lastColumn: 19 },
//   { line: 2,
//     column: 20,
//     lastColumn: Infinity } ]

```

#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)

Returns the original source, line, and column information for the generated
source's line and column positions provided. The only argument is an object with
the following properties:

* `line`: The line number in the generated source.

* `column`: The column number in the generated source.

* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or
  `SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest
  element that is smaller than or greater than the one we are searching for,
  respectively, if the exact element cannot be found. Defaults to
  `SourceMapConsumer.GREATEST_LOWER_BOUND`.

and an object is returned with the following properties:

* `source`: The original source file, or null if this information is not
  available.

* `line`: The line number in the original source, or null if this information is
  not available.

* `column`: The column number in the original source, or null if this
  information is not available.

* `name`: The original identifier, or null if this information is not available.

```js
consumer.originalPositionFor({ line: 2, column: 10 })
// { source: 'foo.coffee',
//   line: 2,
//   column: 2,
//   name: null }

consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 })
// { source: null,
//   line: null,
//   column: null,
//   name: null }
```

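When the exact position has no mapping, `bias` selects which neighboring
mapping is returned. A sketch (the position below is hypothetical):

```js
// With the default GREATEST_LOWER_BOUND bias, the closest mapping at or before
// the requested column is used; LEAST_UPPER_BOUND looks forward instead.
consumer.originalPositionFor({
  line: 2,
  column: 5,
  bias: sourceMap.SourceMapConsumer.LEAST_UPPER_BOUND
});
```
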
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)

Returns the generated line and column information for the original source,
line, and column positions provided. The only argument is an object with
the following properties:

* `source`: The filename of the original source.

* `line`: The line number in the original source.

* `column`: The column number in the original source.

and an object is returned with the following properties:

* `line`: The line number in the generated source, or null.

* `column`: The column number in the generated source, or null.

```js
consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 })
// { line: 1,
//   column: 56 }
```

#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)

Returns all generated line and column information for the original source, line,
and column provided. If no column is provided, returns all mappings
corresponding to either the line we are searching for or the next closest line
that has any mappings. Otherwise, returns all mappings corresponding to the
given line and either the column we are searching for or the next closest column
that has any offsets.

The only argument is an object with the following properties:

* `source`: The filename of the original source.

* `line`: The line number in the original source.

* `column`: Optional. The column number in the original source.

and an array of objects is returned, each with the following properties:

* `line`: The line number in the generated source, or null.

* `column`: The column number in the generated source, or null.

```js
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
// [ { line: 2,
//     column: 1 },
//   { line: 2,
//     column: 10 },
//   { line: 2,
//     column: 20 } ]
```

#### SourceMapConsumer.prototype.hasContentsOfAllSources()

Return true if we have the embedded source content for every source listed in
the source map, false otherwise.

In other words, if this method returns `true`, then
`consumer.sourceContentFor(s)` will succeed for every source `s` in
`consumer.sources`.

```js
// ...
if (consumer.hasContentsOfAllSources()) {
  consumerReadyCallback(consumer);
} else {
  fetchSources(consumer, consumerReadyCallback);
}
// ...
```

#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])

Returns the original source content for the source provided. The only
argument is the URL of the original source file.

If the source content for the given source is not found, then an error is
thrown. Optionally, pass `true` as the second param to have `null` returned
instead.

```js
consumer.sources
// [ "my-cool-lib.clj" ]

consumer.sourceContentFor("my-cool-lib.clj")
// "..."

consumer.sourceContentFor("this is not in the source map");
// Error: "this is not in the source map" is not in the source map

consumer.sourceContentFor("this is not in the source map", true);
// null
```

#### SourceMapConsumer.prototype.eachMapping(callback, context, order)

Iterate over each mapping between an original source/line/column and a
generated line/column in this source map.

* `callback`: The function that is called with each mapping. Mappings have the
  form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
  name }`

* `context`: Optional. If specified, this object will be the value of `this`
  every time that `callback` is called.

* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
  `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
  the mappings sorted by the generated file's line/column order or the
  original's source/line/column order, respectively. Defaults to
  `SourceMapConsumer.GENERATED_ORDER`.

```js
consumer.eachMapping(function (m) { console.log(m); })
// ...
// { source: 'illmatic.js',
//   generatedLine: 1,
//   generatedColumn: 0,
//   originalLine: 1,
//   originalColumn: 0,
//   name: null }
// { source: 'illmatic.js',
//   generatedLine: 2,
//   generatedColumn: 0,
//   originalLine: 2,
//   originalColumn: 0,
//   name: null }
// ...
```

### SourceMapGenerator

An instance of the SourceMapGenerator represents a source map which is being
built incrementally.

#### new SourceMapGenerator([startOfSourceMap])

You may pass an object with the following properties:

* `file`: The filename of the generated source that this source map is
  associated with.

* `sourceRoot`: A root for all relative URLs in this source map.

* `skipValidation`: Optional. When `true`, disables validation of mappings as
  they are added. This can improve performance but should be used with
  discretion, as a last resort. Even then, one should avoid using this flag when
  running tests, if possible.

```js
var generator = new sourceMap.SourceMapGenerator({
  file: "my-generated-javascript-file.js",
  sourceRoot: "http://example.com/app/js/"
});
```

#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)

Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance.

* `sourceMapConsumer`: The `SourceMapConsumer` to build the new generator from.

```js
var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer);
```

#### SourceMapGenerator.prototype.addMapping(mapping)

Add a single mapping from original source line and column to the generated
source's line and column for this source map being created. The mapping object
should have the following properties:

* `generated`: An object with the generated line and column positions.

* `original`: An object with the original line and column positions.

* `source`: The original source file (relative to the sourceRoot).

* `name`: An optional original token name for this mapping.

```js
generator.addMapping({
  source: "module-one.scm",
  original: { line: 128, column: 0 },
  generated: { line: 3, column: 456 }
})
```

#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)

Set the source content for an original source file.

* `sourceFile`: The URL of the original source file.

* `sourceContent`: The content of the source file.

```js
generator.setSourceContent("module-one.scm",
  fs.readFileSync("path/to/module-one.scm", "utf8"))
```

#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])

Applies a SourceMap for a source file to the SourceMap.
Each mapping to the supplied source file is rewritten using the
supplied SourceMap. Note: The resolution for the resulting mappings
is the minimum of this map and the supplied map.

* `sourceMapConsumer`: The SourceMap to be applied.

* `sourceFile`: Optional. The filename of the source file.
  If omitted, sourceMapConsumer.file will be used, if it exists.
  Otherwise an error will be thrown.

* `sourceMapPath`: Optional. The dirname of the path to the SourceMap
  to be applied. If relative, it is relative to the SourceMap.

  This parameter is needed when the two SourceMaps aren't in the same
  directory, and the SourceMap to be applied contains relative source
  paths. If so, those relative source paths need to be rewritten
  relative to the SourceMap.

  If omitted, it is assumed that both SourceMaps are in the same directory,
  thus not needing any rewriting. (Supplying `'.'` has the same effect.)

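As a sketch of how this might be used to chain two build steps (the file names
and paths below are hypothetical):

```js
// Suppose `generator` currently maps bundle.js -> transpiled.js, and the map
// read below maps transpiled.js -> original.coffee.
var transpiledConsumer = new sourceMap.SourceMapConsumer(
  fs.readFileSync("build/transpiled.js.map", "utf8"));

// Rewrite every mapping that points at "transpiled.js" so that it points at
// the CoffeeScript original instead; "build" is where the applied map lives,
// so its relative source paths can be resolved.
generator.applySourceMap(transpiledConsumer, "transpiled.js", "build");
```
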
#### SourceMapGenerator.prototype.toString()

Renders the source map being generated to a string.

```js
generator.toString()
// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}'
```

### SourceNode

SourceNodes provide a way to abstract over interpolating and/or concatenating
snippets of generated JavaScript source code, while maintaining the line and
column information associated between those snippets and the original source
code. This is useful as the final intermediate representation a compiler might
use before outputting the generated JS and source map.

#### new SourceNode([line, column, source[, chunk[, name]]])

* `line`: The original line number associated with this source node, or null if
  it isn't associated with an original line.

* `column`: The original column number associated with this source node, or null
  if it isn't associated with an original column.

* `source`: The original source's filename; null if no filename is provided.

* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
  below.

* `name`: Optional. The original identifier.

```js
var node = new SourceNode(1, 2, "a.cpp", [
  new SourceNode(3, 4, "b.cpp", "extern int status;\n"),
  new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"),
  new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"),
]);
```

#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])

Creates a SourceNode from generated code and a SourceMapConsumer.

* `code`: The generated code.

* `sourceMapConsumer`: The SourceMap for the generated code.

* `relativePath`: The optional path that relative sources in `sourceMapConsumer`
  should be relative to.

```js
var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8"));
var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js", "utf8"),
                                              consumer);
```

#### SourceNode.prototype.add(chunk)

Add a chunk of generated JS to this source node.

* `chunk`: A string snippet of generated JS code, another instance of
  `SourceNode`, or an array where each member is one of those things.

```js
node.add(" + ");
node.add(otherNode);
node.add([leftHandOperandNode, " + ", rightHandOperandNode]);
```

#### SourceNode.prototype.prepend(chunk)

Prepend a chunk of generated JS to this source node.

* `chunk`: A string snippet of generated JS code, another instance of
  `SourceNode`, or an array where each member is one of those things.

```js
node.prepend("/** Build Id: f783haef86324gf **/\n\n");
```

#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)

Set the source content for a source file. This will be added to the
`SourceMap` in the `sourcesContent` field.

* `sourceFile`: The filename of the source file.

* `sourceContent`: The content of the source file.

```js
node.setSourceContent("module-one.scm",
  fs.readFileSync("path/to/module-one.scm", "utf8"))
```

#### SourceNode.prototype.walk(fn)

Walk over the tree of JS snippets in this node and its children. The walking
function is called once for each snippet of JS and is passed that snippet and
its associated original source's line/column location.

* `fn`: The traversal function.

```js
var node = new SourceNode(1, 2, "a.js", [
  new SourceNode(3, 4, "b.js", "uno"),
  "dos",
  [
    "tres",
    new SourceNode(5, 6, "c.js", "quatro")
  ]
]);

node.walk(function (code, loc) { console.log("WALK:", code, loc); })
// WALK: uno { source: 'b.js', line: 3, column: 4, name: null }
// WALK: dos { source: 'a.js', line: 1, column: 2, name: null }
// WALK: tres { source: 'a.js', line: 1, column: 2, name: null }
// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null }
```

#### SourceNode.prototype.walkSourceContents(fn)

Walk over the tree of SourceNodes. The walking function is called for each
source file content and is passed the filename and source content.

* `fn`: The traversal function.

```js
var a = new SourceNode(1, 2, "a.js", "generated from a");
a.setSourceContent("a.js", "original a");
var b = new SourceNode(1, 2, "b.js", "generated from b");
b.setSourceContent("b.js", "original b");
var c = new SourceNode(1, 2, "c.js", "generated from c");
c.setSourceContent("c.js", "original c");

var node = new SourceNode(null, null, null, [a, b, c]);
node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); })
// WALK: a.js : original a
// WALK: b.js : original b
// WALK: c.js : original c
```

#### SourceNode.prototype.join(sep)

Like `Array.prototype.join` except for SourceNodes. Inserts the separator
between each of this source node's children.

* `sep`: The separator.

```js
var lhs = new SourceNode(1, 2, "a.rs", "my_copy");
var operand = new SourceNode(3, 4, "a.rs", "=");
var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()");

var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]);
var joinedNode = node.join(" ");
```

#### SourceNode.prototype.replaceRight(pattern, replacement)

Call `String.prototype.replace` on the very right-most source snippet. Useful
for trimming white space from the end of a source node, etc.

* `pattern`: The pattern to replace.

* `replacement`: The thing to replace the pattern with.

```js
// Trim trailing white space.
node.replaceRight(/\s*$/, "");
```

#### SourceNode.prototype.toString()

Return the string representation of this source node. Walks over the tree and
concatenates all the various snippets together into one string.

```js
var node = new SourceNode(1, 2, "a.js", [
  new SourceNode(3, 4, "b.js", "uno"),
  "dos",
  [
    "tres",
    new SourceNode(5, 6, "c.js", "quatro")
  ]
]);

node.toString()
// 'unodostresquatro'
```

#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])

Returns the string representation of this tree of source nodes, plus a
SourceMapGenerator which contains all the mappings between the generated and
original sources.

The arguments are the same as those to `new SourceMapGenerator`.

```js
var node = new SourceNode(1, 2, "a.js", [
  new SourceNode(3, 4, "b.js", "uno"),
  "dos",
  [
    "tres",
    new SourceNode(5, 6, "c.js", "quatro")
  ]
]);

node.toStringWithSourceMap({ file: "my-output-file.js" })
// { code: 'unodostresquatro',
//   map: [object SourceMapGenerator] }
```
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1,121 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */

var util = require('./util');
var has = Object.prototype.hasOwnProperty;
var hasNativeMap = typeof Map !== "undefined";

/**
 * A data structure which is a combination of an array and a set. Adding a new
 * member is O(1), testing for membership is O(1), and finding the index of an
 * element is O(1). Removing elements from the set is not supported. Only
 * strings are supported for membership.
 */
function ArraySet() {
  this._array = [];
  this._set = hasNativeMap ? new Map() : Object.create(null);
}
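
// Example usage (illustrative; the methods used here are defined below):
//
//   var set = ArraySet.fromArray(["foo", "bar", "foo"]);
//   set.size();          // 2 -- the duplicate "foo" is not counted
//   set.has("bar");      // true
//   set.indexOf("foo");  // 0
//   set.at(1);           // "bar"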

/**
 * Static method for creating ArraySet instances from an existing array.
 */
ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {
  var set = new ArraySet();
  for (var i = 0, len = aArray.length; i < len; i++) {
    set.add(aArray[i], aAllowDuplicates);
  }
  return set;
};

/**
 * Return how many unique items are in this ArraySet. If duplicates have been
 * added, then those do not count towards the size.
 *
 * @returns Number
 */
ArraySet.prototype.size = function ArraySet_size() {
  return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length;
};

/**
 * Add the given string to this set.
 *
 * @param String aStr
 */
ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {
  var sStr = hasNativeMap ? aStr : util.toSetString(aStr);
  var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr);
  var idx = this._array.length;
  if (!isDuplicate || aAllowDuplicates) {
    this._array.push(aStr);
  }
  if (!isDuplicate) {
    if (hasNativeMap) {
      this._set.set(aStr, idx);
    } else {
      this._set[sStr] = idx;
    }
  }
};

/**
 * Is the given string a member of this set?
 *
 * @param String aStr
 */
ArraySet.prototype.has = function ArraySet_has(aStr) {
  if (hasNativeMap) {
    return this._set.has(aStr);
  } else {
    var sStr = util.toSetString(aStr);
    return has.call(this._set, sStr);
  }
};

/**
 * What is the index of the given string in the array?
 *
 * @param String aStr
 */
ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
  if (hasNativeMap) {
    var idx = this._set.get(aStr);
    if (idx >= 0) {
      return idx;
    }
  } else {
    var sStr = util.toSetString(aStr);
    if (has.call(this._set, sStr)) {
      return this._set[sStr];
    }
  }

  throw new Error('"' + aStr + '" is not in the set.');
};

/**
 * What is the element at the given index?
 *
 * @param Number aIdx
 */
ArraySet.prototype.at = function ArraySet_at(aIdx) {
  if (aIdx >= 0 && aIdx < this._array.length) {
    return this._array[aIdx];
  }
  throw new Error('No element indexed by ' + aIdx);
};

/**
 * Returns the array representation of this set (which has the proper indices
 * indicated by indexOf). Note that this is a copy of the internal array used
 * for storing the members so that no one can mess with internal state.
 */
ArraySet.prototype.toArray = function ArraySet_toArray() {
  return this._array.slice();
};

exports.ArraySet = ArraySet;
@ -1,140 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 *
 * Based on the Base 64 VLQ implementation in Closure Compiler:
 * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
 *
 * Copyright 2011 The Closure Compiler Authors. All rights reserved.
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above
 *    copyright notice, this list of conditions and the following
 *    disclaimer in the documentation and/or other materials provided
 *    with the distribution.
 *  * Neither the name of Google Inc. nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

var base64 = require('./base64');

// A single base 64 digit can contain 6 bits of data. For the base 64 variable
// length quantities we use in the source map spec, the first bit is the sign,
// the next four bits are the actual value, and the 6th bit is the
// continuation bit. The continuation bit tells us whether there are more
// digits in this value following this digit.
//
//   Continuation
//   |    Sign
//   |    |
//   V    V
//   101011

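// Worked example: encoding the value 16. toVLQSigned(16) is 32 (sign bit 0 in
// the least significant position), which takes two base 64 digits:
//
//   low 5 bits  00000, continuation bit set  -> digit 32 -> 'g'
//   next 5 bits 00001, no continuation bit   -> digit  1 -> 'B'
//
// so encode(16) === "gB".
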
var VLQ_BASE_SHIFT = 5;

// binary: 100000
var VLQ_BASE = 1 << VLQ_BASE_SHIFT;

// binary: 011111
var VLQ_BASE_MASK = VLQ_BASE - 1;

// binary: 100000
var VLQ_CONTINUATION_BIT = VLQ_BASE;

/**
 * Converts from a two's complement value to a value where the sign bit is
 * placed in the least significant bit. For example, as decimals:
 *   1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
 *   2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
 */
function toVLQSigned(aValue) {
  return aValue < 0
    ? ((-aValue) << 1) + 1
    : (aValue << 1) + 0;
}

/**
 * Converts to a two's complement value from a value where the sign bit is
 * placed in the least significant bit. For example, as decimals:
 *   2 (10 binary) becomes 1, 3 (11 binary) becomes -1
 *   4 (100 binary) becomes 2, 5 (101 binary) becomes -2
 */
function fromVLQSigned(aValue) {
  var isNegative = (aValue & 1) === 1;
  var shifted = aValue >> 1;
  return isNegative
    ? -shifted
    : shifted;
}

/**
 * Returns the base 64 VLQ encoded value.
 */
exports.encode = function base64VLQ_encode(aValue) {
  var encoded = "";
  var digit;

  var vlq = toVLQSigned(aValue);

  do {
    digit = vlq & VLQ_BASE_MASK;
    vlq >>>= VLQ_BASE_SHIFT;
    if (vlq > 0) {
      // There are still more digits in this value, so we must make sure the
      // continuation bit is marked.
      digit |= VLQ_CONTINUATION_BIT;
    }
    encoded += base64.encode(digit);
  } while (vlq > 0);

  return encoded;
};

/**
 * Decodes the next base 64 VLQ value from the given string and returns the
 * value and the rest of the string via the out parameter.
 */
exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {
  var strLen = aStr.length;
  var result = 0;
  var shift = 0;
  var continuation, digit;

  do {
    if (aIndex >= strLen) {
      throw new Error("Expected more digits in base 64 VLQ value.");
    }

    digit = base64.decode(aStr.charCodeAt(aIndex++));
    if (digit === -1) {
      throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1));
    }

    continuation = !!(digit & VLQ_CONTINUATION_BIT);
    digit &= VLQ_BASE_MASK;
    result = result + (digit << shift);
    shift += VLQ_BASE_SHIFT;
  } while (continuation);

  aOutParam.value = fromVLQSigned(result);
  aOutParam.rest = aIndex;
};
@ -1,67 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */

var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');

/**
 * Encode an integer in the range of 0 to 63 to a single base 64 digit.
 */
exports.encode = function (number) {
  if (0 <= number && number < intToCharMap.length) {
    return intToCharMap[number];
  }
  throw new TypeError("Must be between 0 and 63: " + number);
};

/**
 * Decode a single base 64 character code digit to an integer. Returns -1 on
 * failure.
 */
exports.decode = function (charCode) {
  var bigA = 65;     // 'A'
  var bigZ = 90;     // 'Z'

  var littleA = 97;  // 'a'
  var littleZ = 122; // 'z'

  var zero = 48;     // '0'
  var nine = 57;     // '9'

  var plus = 43;     // '+'
  var slash = 47;    // '/'

  var littleOffset = 26;
  var numberOffset = 52;

  // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ
  if (bigA <= charCode && charCode <= bigZ) {
    return (charCode - bigA);
  }

  // 26 - 51: abcdefghijklmnopqrstuvwxyz
  if (littleA <= charCode && charCode <= littleZ) {
    return (charCode - littleA + littleOffset);
  }

  // 52 - 61: 0123456789
  if (zero <= charCode && charCode <= nine) {
    return (charCode - zero + numberOffset);
  }

  // 62: +
  if (charCode == plus) {
    return 62;
  }

  // 63: /
  if (charCode == slash) {
    return 63;
  }

  // Invalid base64 digit.
  return -1;
};
Some files were not shown because too many files have changed in this diff