CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutSign UpSign In
rapid7

Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place.

GitHub Repository: rapid7/metasploit-framework
Path: blob/master/docs/build.rb
Views: 11704
1
require 'fileutils'
2
require 'uri'
3
require 'open3'
4
require 'optparse'
5
require 'did_you_mean'
6
require 'kramdown'
7
require_relative './navigation'
8
9
# This build module was used to migrate the old Metasploit wiki https://github.com/rapid7/metasploit-framework/wiki into a format
10
# supported by Jekyll. Jekyll was chosen as it was written in Ruby, which should reduce the barrier to entry for contributions.
11
#
12
# The build script took the flatlist of markdown files from the wiki, and converted them into the hierarchical folder structure
13
# for nested documentation. This configuration is defined in `navigation.rb`
14
#
15
# In the future a different site generator could be used, but it should be possible to use this build script again to migrate to a new format
16
#
17
# For now the doc folder only contains the key files for building the docs site and no content. The content is created on demand
18
# from the `metasploit-framework.wiki` folder on each build
19
module Build
20
# The metasploit-framework.wiki files that are committed to Metasploit framework's repository
21
WIKI_PATH = 'metasploit-framework.wiki'.freeze
22
# A locally cloned version of https://github.com/rapid7/metasploit-framework/wiki - should no longer be required for normal workflows
23
OLD_WIKI_PATH = 'metasploit-framework.wiki.old'.freeze
24
RELEASE_BUILD_ARTIFACTS = '_site'.freeze
25
26
# For now we Git clone the existing metasploit wiki and generate the Jekyll markdown files
27
# for each build. This allows changes to be made to the existing wiki until it's migrated
28
# into the main framework repo
29
module Git
30
def self.clone_wiki!
31
unless File.exist?(OLD_WIKI_PATH)
32
Build.run_command "git clone https://github.com/rapid7/metasploit-framework.wiki.git #{OLD_WIKI_PATH}", exception: true
33
end
34
35
Build.run_command "cd #{OLD_WIKI_PATH}; git pull", exception: true
36
end
37
end
38
39
class ConfigValidationError < StandardError
40
end
41
42
# Configuration for generating the new website hierarchy, from the existing metasploit-framework wiki
43
class Config
44
include Enumerable
45
46
def initialize(config)
47
@config = config
48
end
49
50
def validate!
51
configured_paths = all_file_paths
52
missing_paths = available_paths.map { |path| path.gsub("#{WIKI_PATH}/", '') } - ignored_paths - existing_docs - configured_paths
53
raise ConfigValidationError, "Unhandled paths #{missing_paths.join(', ')} - add navigation entries to navigation.rb for these files" if missing_paths.any?
54
55
each do |page|
56
page_keys = page.keys
57
allowed_keys = %i[old_wiki_path path new_base_name nav_order title new_path folder children has_children parents]
58
invalid_keys = page_keys - allowed_keys
59
60
suggestion = DidYouMean::SpellChecker.new(dictionary: allowed_keys).correct(invalid_keys[0]).first
61
error = "#{page} had invalid keys #{invalid_keys.join(', ')}."
62
error += " Did you mean #{suggestion}?" if suggestion
63
64
raise ConfigValidationError, error if invalid_keys.any?
65
end
66
67
# Ensure unique folder names
68
folder_titles = to_enum.select { |page| page[:folder] }.map { |page| page[:title] }
69
duplicate_folder = folder_titles.tally.select { |_name, count| count > 1 }
70
raise ConfigValidationError, "Duplicate folder titles, will cause issues: #{duplicate_folder}" if duplicate_folder.any?
71
72
# Ensure no folder titles match file titles
73
page_titles = to_enum.reject { |page| page[:folder] }.map { |page| page[:title] }
74
title_collisions = (folder_titles & page_titles).tally
75
raise ConfigValidationError, "Duplicate folder/page titles, will cause issues: #{title_collisions}" if title_collisions.any?
76
77
# Ensure there are no files being migrated to multiple places
78
page_paths = to_enum.reject { |page| page[:path] }.map { |page| page[:title] }
79
duplicate_page_paths = page_paths.tally.select { |_name, count| count > 1 }
80
raise ConfigValidationError, "Duplicate paths, will cause issues: #{duplicate_page_paths}" if duplicate_page_paths.any?
81
82
# Ensure new file paths are only alphanumeric and hyphenated
83
new_paths = to_enum.map { |page| page[:new_path] }
84
invalid_new_paths = new_paths.reject { |path| File.basename(path) =~ /^[a-zA-Z0-9_-]*\.md$/ }
85
raise ConfigValidationError, "Only alphanumeric and hyphenated file names required: #{invalid_new_paths}" if invalid_new_paths.any?
86
end
87
88
def available_paths
89
Dir.glob("#{WIKI_PATH}/**/*{.md,.textile}", File::FNM_DOTMATCH)
90
end
91
92
def ignored_paths
93
[
94
]
95
end
96
97
def existing_docs
98
existing_docs = Dir.glob('docs/**/*', File::FNM_DOTMATCH)
99
existing_docs
100
end
101
102
def each(&block)
103
config.each do |parent|
104
recurse(with_metadata(parent), &block)
105
end
106
end
107
108
def all_file_paths
109
to_enum.map { |item| item[:path] }.to_a
110
end
111
112
protected
113
114
# depth first traversal
115
def recurse(parent_with_metadata, &block)
116
block.call(parent_with_metadata)
117
parent_with_metadata[:children].to_a.each do |child|
118
child_with_metadata = with_metadata(child, parents: parent_with_metadata[:parents] + [parent_with_metadata])
119
recurse(child_with_metadata, &block)
120
end
121
end
122
123
def with_metadata(child, parents: [])
124
child = child.clone
125
126
if child[:folder]
127
parent_folders = parents.map { |page| page[:folder] }
128
child[:new_path] = File.join(*parent_folders, child[:folder], 'index.md')
129
else
130
path = child[:path]
131
base_name = child[:new_base_name] || File.basename(path)
132
133
# title calculation
134
computed_title = File.basename(base_name, '.md').gsub('-', ' ')
135
if child[:title].is_a?(Proc)
136
child[:title] = child[:title].call(computed_title)
137
else
138
child[:title] ||= computed_title
139
end
140
141
parent_folders = parents.map { |page| page[:folder] }
142
child[:new_path] = File.join(*parent_folders, base_name.downcase)
143
end
144
145
child[:parents] = parents
146
child[:has_children] = true if child[:children].to_a.any?
147
148
child
149
end
150
151
attr_reader :config
152
end
153
154
# Extracts markdown links from https://github.com/rapid7/metasploit-framework/wiki into a Jekyll format
155
# Additionally corrects links to Github
156
class LinkCorrector
157
def initialize(config)
158
@config = config
159
@links = {}
160
end
161
162
def syntax_errors_for(markdown)
163
MarkdownLinkSyntaxVerifier.errors_for(markdown)
164
end
165
166
def extract(markdown)
167
extracted_absolute_wiki_links = extract_absolute_wiki_links(markdown)
168
@links = @links.merge(extracted_absolute_wiki_links)
169
170
extracted_relative_links = extract_relative_links(markdown)
171
@links = @links.merge(extracted_relative_links)
172
173
@links
174
end
175
176
def rerender(markdown)
177
links ||= @links
178
179
new_markdown = markdown.clone
180
links.each_value do |link|
181
new_markdown.gsub!(link[:full_match], link[:replacement])
182
end
183
184
new_markdown
185
end
186
187
attr_reader :links
188
189
protected
190
191
def pages
192
@config.enum_for(:each).map { |page| page }
193
end
194
195
# scans for absolute links to the old wiki such as 'https://docs.metasploit.com/docs/using-metasploit/advanced/metasploit-web-service.html'
196
def extract_absolute_wiki_links(markdown)
197
new_links = {}
198
199
markdown.scan(%r{(https?://github.com/rapid7/metasploit-framework/wiki/([\w().%_#-]+))}) do |full_match, old_path|
200
full_match = full_match.gsub(/[).]+$/, '')
201
old_path = URI.decode_www_form_component(old_path.gsub(/[).]+$/, ''))
202
203
begin
204
old_path_anchor = URI.parse(old_path).fragment
205
rescue URI::InvalidURIError
206
old_path_anchor = nil
207
end
208
209
new_path = new_path_for(old_path, old_path_anchor)
210
replacement = "{% link docs/#{new_path} %}#{old_path_anchor ? "##{old_path_anchor}" : ""}"
211
212
link = {
213
full_match: full_match,
214
type: :absolute,
215
new_path: new_path,
216
replacement: replacement
217
}
218
219
new_links[full_match] = link
220
end
221
222
new_links
223
end
224
225
# Scans for Github wiki flavor links such as:
226
# '[[Relative Path]]'
227
# '[[Custom name|Relative Path]]'
228
# '[[Custom name|relative-path]]'
229
# '[[Custom name|./relative-path.md]]'
230
# '[[Custom name|./relative-path.md#section-anchor-to-link-to]]'
231
# Note that the page target resource file is validated for existence at build time - but the section anchors are not
232
def extract_relative_links(markdown)
233
existing_links = @links
234
new_links = {}
235
236
markdown.scan(/(\[\[([\w\/_ '().:,-]+)(?:\|([\w\/_ '():,.#-]+))?\]\])/) do |full_match, left, right|
237
old_path = (right || left)
238
begin
239
old_path_anchor = URI.parse(old_path).fragment
240
rescue URI::InvalidURIError
241
old_path_anchor = nil
242
end
243
new_path = new_path_for(old_path, old_path_anchor)
244
if existing_links[full_match] && existing_links[full_match][:new_path] != new_path
245
raise "Link for #{full_match} previously resolved to #{existing_links[full_match][:new_path]}, but now resolves to #{new_path}"
246
end
247
248
link_text = left
249
replacement = "[#{link_text}]({% link docs/#{new_path} %}#{old_path_anchor ? "##{old_path_anchor}" : ""})"
250
251
link = {
252
full_match: full_match,
253
type: :relative,
254
left: left,
255
right: right,
256
new_path: new_path,
257
replacement: replacement
258
}
259
260
new_links[full_match] = link
261
end
262
263
new_links
264
end
265
266
def new_path_for(old_path, old_path_anchor)
267
# Strip out any leading `./` or `/` before the relative path.
268
# This is needed for our later code that does additional filtering for
269
# potential ambiguity with absolute paths since those comparisons occur
270
# against filenames without the leading ./ and / parts.
271
old_path = old_path.gsub(/^[.\/]+/, '')
272
273
# Replace any spaces in the file name with - separators, then
274
# make replace anchors with an empty string.
275
old_path = old_path.gsub(' ', '-').gsub("##{old_path_anchor}", '')
276
277
matched_pages = pages.select do |page|
278
!page[:folder] &&
279
(File.basename(page[:path]).downcase == "#{File.basename(old_path)}.md".downcase ||
280
File.basename(page[:path]).downcase == "#{File.basename(old_path)}".downcase)
281
end
282
if matched_pages.empty?
283
raise "Link not found: #{old_path}"
284
end
285
# Additional filter for absolute paths if there's potential ambiguity
286
if matched_pages.count > 1
287
refined_pages = matched_pages.select do |page|
288
!page[:folder] &&
289
(page[:path].downcase == "#{old_path}.md".downcase ||
290
page[:path].downcase == old_path.downcase)
291
end
292
293
if refined_pages.count != 1
294
page_paths = matched_pages.map { |page| page[:path] }
295
raise "Duplicate paths for #{old_path} - possible page paths found: #{page_paths}"
296
end
297
298
matched_pages = refined_pages
299
end
300
301
matched_pages.first.fetch(:new_path)
302
end
303
end
304
305
# Verifies that markdown links are not relative. Instead the Github wiki flavored syntax should be used.
306
#
307
# Example bad: `[Human readable text](./some-documentation-link)`
308
# Example good: `[[Human readable text|./some-documentation-link]]`
309
class MarkdownLinkSyntaxVerifier
310
# Detects the usage of bad syntax and returns an array of detected errors
311
#
312
# @param [String] markdown The markdown
313
# @return [Array<String>] An array of human readable errors that should be resolved
314
def self.errors_for(markdown)
315
document = Kramdown::Document.new(markdown)
316
document.to_validated_wiki_page
317
warnings = document.warnings.select { |warning| warning.start_with?(Kramdown::Converter::ValidatedWikiPage::WARNING_PREFIX) }
318
warnings
319
end
320
321
# Implementation detail: There doesn't seem to be a generic AST visitor pattern library for Ruby; We instead implement
322
# Kramdown's Markdown to HTML Converter API, override the link converter method, and warn on any invalid links that are identified.
323
# The {MarkdownLinkVerifier} will ignore the HTML result, and return any detected errors instead.
324
#
325
# https://kramdown.gettalong.org/rdoc/Kramdown/Converter/Html.html
326
class Kramdown::Converter::ValidatedWikiPage < Kramdown::Converter::Html
327
WARNING_PREFIX = '[WikiLinkValidation]'
328
329
def convert_a(el, indent)
330
link_href = el.attr['href']
331
if relative_link?(link_href)
332
link_text = el.children.map { |child| convert(child) }.join
333
warning "Invalid docs link syntax found on line #{el.options[:location]}: Invalid relative link #{link_href} found. Please use the syntax [[#{link_text}|#{link_href}]] instead"
334
end
335
336
if absolute_docs_link?(link_href)
337
begin
338
example_path = ".#{URI.parse(link_href).path}"
339
rescue URI::InvalidURIError
340
example_path = "./path-to-markdown-file"
341
end
342
343
link_text = el.children.map { |child| convert(child) }.join
344
warning "Invalid docs link syntax found on line #{el.options[:location]}: Invalid absolute link #{link_href} found. Please use relative links instead, i.e. [[#{link_text}|#{example_path}]] instead"
345
end
346
347
super
348
end
349
350
private
351
352
def warning(text)
353
super "#{WARNING_PREFIX} #{text}"
354
end
355
356
def relative_link?(link_path)
357
!(link_path.start_with?('http:') || link_path.start_with?('https:') || link_path.start_with?('mailto:') || link_path.start_with?('#'))
358
end
359
360
# @return [TrueClass, FalseClass] True if the link is to a Metasploit docs page that isn't either the root home page or the API site, otherwise false
361
def absolute_docs_link?(link_path)
362
link_path.include?('docs.metasploit.com') && !link_path.include?('docs.metasploit.com/api') && !(link_path == 'https://docs.metasploit.com/')
363
end
364
end
365
end
366
367
# Parses a wiki page and can add/remove/update a deprecation notice
368
class WikiDeprecationText
369
MAINTAINER_MESSAGE_PREFIX = "<!-- Maintainers: "
370
private_constant :MAINTAINER_MESSAGE_PREFIX
371
372
USER_MESSAGE_PREFIX = '**Documentation Update:'.freeze
373
private_constant :USER_MESSAGE_PREFIX
374
375
def self.upsert(original_wiki_content, old_path:, new_url:)
376
history_link = old_path.include?("#{WIKI_PATH}/Home.md") ? './Home/_history' : './_history'
377
maintainer_message = "#{MAINTAINER_MESSAGE_PREFIX} Please do not modify this file directly, create a pull request instead -->\n\n"
378
user_message = "#{USER_MESSAGE_PREFIX} This Wiki page should be viewable at [#{new_url}](#{new_url}). Or if it is no longer available, see this page's [previous history](#{history_link})**\n\n"
379
deprecation_text = maintainer_message + user_message
380
"#{deprecation_text}"
381
end
382
383
def self.remove(original_wiki_content)
384
original_wiki_content
385
.gsub(/^#{Regexp.escape(MAINTAINER_MESSAGE_PREFIX)}.*$\s+/, '')
386
.gsub(/^#{Regexp.escape(USER_MESSAGE_PREFIX)}.*$\s+/, '')
387
end
388
end
389
390
# Converts Wiki markdown pages into a valid Jekyll format
391
class WikiMigration
392
# Implements two core components:
393
# - Converts the existing Wiki markdown pages into a Jekyll format
394
# - Optionally updates the existing Wiki markdown pages with a link to the new website location
395
def run(config, options = {})
396
begin
397
config.validate!
398
rescue
399
puts "[!] Validation failed. Please verify navigation.rb is valid, as well as the markdown file"
400
raise
401
end
402
403
# Clean up new docs folder in preparation for regenerating it entirely from the latest wiki
404
result_folder = File.join('.', 'docs')
405
FileUtils.remove_dir(result_folder, true)
406
FileUtils.mkdir(result_folder)
407
408
link_corrector = link_corrector_for(config)
409
config.each do |page|
410
page_config = {
411
layout: 'default',
412
**page.slice(:title, :has_children, :nav_order),
413
parent: (page[:parents][-1] || {})[:title],
414
warning: "Do not modify this file directly. Please modify metasploit-framework/docs/metasploit-framework.wiki instead",
415
old_path: page[:path] ? File.join(WIKI_PATH, page[:path]) : "none - folder automatically generated",
416
has_content: !page[:path].nil?
417
}.compact
418
419
page_config[:has_children] = true if page[:has_children]
420
preamble = <<~PREAMBLE
421
---
422
#{page_config.map { |key, value| "#{key}: #{value.to_s.strip.inspect}" }.join("\n")}
423
---
424
425
PREAMBLE
426
427
new_path = File.join(result_folder, page[:new_path])
428
FileUtils.mkdir_p(File.dirname(new_path))
429
430
if page[:folder] && page[:path].nil?
431
new_docs_content = preamble.rstrip + "\n"
432
else
433
old_path = File.join(WIKI_PATH, page[:path])
434
previous_content = File.read(old_path, encoding: Encoding::UTF_8)
435
new_docs_content = preamble + WikiDeprecationText.remove(previous_content)
436
new_docs_content = link_corrector.rerender(new_docs_content)
437
438
# Update the old Wiki with links to the new website
439
if options[:update_wiki_deprecation_notice]
440
new_url = options[:update_wiki_deprecation_notice][:new_website_url]
441
if page[:new_path] != 'home.md'
442
new_url += 'docs/' + page[:new_path].gsub('.md', '.html')
443
end
444
updated_wiki_content = WikiDeprecationText.upsert(previous_content, old_path: old_path, new_url: new_url)
445
old_wiki_path = File.join(WIKI_PATH, page[:path])
446
File.write(old_wiki_path, updated_wiki_content, mode: 'w', encoding: Encoding::UTF_8)
447
end
448
end
449
450
File.write(new_path, new_docs_content, mode: 'w', encoding: Encoding::UTF_8)
451
end
452
453
# Now that the docs folder is created, time to move the home.md file out
454
FileUtils.mv('docs/home.md', 'index.md')
455
end
456
457
protected
458
459
def link_corrector_for(config)
460
link_corrector = LinkCorrector.new(config)
461
errors = []
462
config.each do |page|
463
unless page[:path].nil?
464
content = File.read(File.join(WIKI_PATH, page[:path]), encoding: Encoding::UTF_8)
465
syntax_errors = link_corrector.syntax_errors_for(content)
466
errors << { path: page[:path], messages: syntax_errors } if syntax_errors.any?
467
468
link_corrector.extract(content)
469
end
470
end
471
472
if errors.any?
473
errors.each do |error|
474
$stderr.puts "[!] Error #{File.join(WIKI_PATH, error[:path])}:\n#{error[:messages].map { |message| "\t- #{message}\n" }.join}"
475
end
476
477
raise "Errors found in markdown syntax"
478
end
479
480
link_corrector
481
end
482
end
483
484
# Serve the release build at http://127.0.0.1:4000/metasploit-framework/
485
class ReleaseBuildServer
486
autoload :WEBrick, 'webrick'
487
488
def self.run
489
server = WEBrick::HTTPServer.new(
490
{
491
Port: 4000
492
}
493
)
494
server.mount('/', WEBrick::HTTPServlet::FileHandler, RELEASE_BUILD_ARTIFACTS)
495
trap('INT') do
496
server.shutdown
497
rescue StandardError
498
nil
499
end
500
server.start
501
ensure
502
server.shutdown
503
end
504
end
505
506
def self.run_command(command, exception: true)
507
puts "[*] #{command}"
508
result = ''
509
::Open3.popen2e(
510
{ 'BUNDLE_GEMFILE' => File.join(Dir.pwd, 'Gemfile') },
511
'/bin/bash', '--login', '-c', command
512
) do |stdin, stdout_and_stderr, wait_thread|
513
stdin.close_write
514
515
while wait_thread.alive?
516
ready = IO.select([stdout_and_stderr], nil, nil, 1)
517
518
next unless ready
519
reads, _writes, _errors = ready
520
521
reads.to_a.each do |io|
522
data = io.read_nonblock(1024)
523
puts data
524
result += data
525
rescue EOFError, Errno::EAGAIN
526
# noop
527
end
528
end
529
530
if !wait_thread.value.success? && exception
531
raise "command #{command.inspect} did not succeed, exit status #{wait_thread.value.exitstatus.inspect}"
532
end
533
end
534
535
result
536
end
537
538
def self.run(options)
539
Git.clone_wiki! if options[:wiki_pull]
540
541
# Create a new branch based on the commits from https://github.com/rapid7/metasploit-framework/wiki to move
542
# Wiki files into the metasploit-framework repo
543
if options[:create_wiki_to_framework_migration_branch]
544
starting_branch = run_command("git rev-parse --abbrev-ref HEAD").chomp
545
new_wiki_branch_name = "move-all-docs-into-folder"
546
new_framework_branch_name = "merge-metasploit-framework-wiki-into-metasploit-framework"
547
548
begin
549
# Create a new folder and branch in the old metasploit wiki for where we'd like it to be inside of the metasploit-framework repo
550
Dir.chdir(OLD_WIKI_PATH) do
551
# Reset the repo back
552
run_command("git checkout master", exception: false)
553
run_command("git reset HEAD --hard", exception: false)
554
run_command("rm -rf metasploit-framework.wiki", exception: false)
555
556
# Create a new folder to move the wiki contents into
557
FileUtils.mkdir_p("metasploit-framework.wiki")
558
run_command("mv *[^metasploit-framework.wiki]* metasploit-framework.wiki", exception: false)
559
560
# Create a new branch + commit
561
run_command("git branch -D #{new_wiki_branch_name}", exception: false)
562
run_command("git checkout -b #{new_wiki_branch_name}")
563
run_command("git add metasploit-framework.wiki")
564
run_command("git commit -am 'Put markdown files into new folder metasploit-framework.wiki in preparation for migration'")
565
end
566
567
# Create a new branch that can be used to create a pull request
568
run_command("git branch -D #{new_framework_branch_name}", exception: false)
569
run_command("git checkout -b #{new_framework_branch_name}")
570
run_command("git remote remove wiki", exception: false)
571
run_command("git remote add -f wiki #{File.join(Dir.pwd, OLD_WIKI_PATH)}", exception: false)
572
# run_command("git remote update wiki")
573
run_command("git merge -m 'Migrate docs from https://github.com/rapid7/metasploit-framework/wiki to main repository' wiki/#{new_wiki_branch_name} --allow-unrelated-histories")
574
575
puts "new branch #{new_framework_branch_name} successfully created"
576
ensure
577
run_command("git checkout #{starting_branch}")
578
end
579
end
580
581
if options[:copy_old_wiki]
582
FileUtils.copy_entry(OLD_WIKI_PATH, WIKI_PATH, preserve = false, dereference_root = false, remove_destination = true)
583
# Remove any deprecation text that might be present after copying the old wiki
584
Dir.glob(File.join(WIKI_PATH, '**', '*.md')) do |path|
585
previous_content = File.read(path, encoding: Encoding::UTF_8)
586
new_content = WikiDeprecationText.remove(previous_content)
587
588
File.write(path, new_content, mode: 'w', encoding: Encoding::UTF_8)
589
end
590
end
591
592
unless options[:build_content]
593
config = Config.new(NAVIGATION_CONFIG)
594
migrator = WikiMigration.new
595
migrator.run(config, options)
596
end
597
598
if options[:production]
599
FileUtils.remove_dir(RELEASE_BUILD_ARTIFACTS, true)
600
run_command('JEKYLL_ENV=production bundle exec jekyll build')
601
602
if options[:serve]
603
ReleaseBuildServer.run
604
end
605
elsif options[:staging]
606
FileUtils.remove_dir(RELEASE_BUILD_ARTIFACTS, true)
607
run_command('JEKYLL_ENV=production bundle exec jekyll build --config _config.yml,_config_staging.yml')
608
609
if options[:serve]
610
ReleaseBuildServer.run
611
end
612
elsif options[:serve]
613
run_command('bundle exec jekyll serve --config _config.yml,_config_development.yml --incremental')
614
end
615
end
616
end
617
618
if $PROGRAM_NAME == __FILE__
619
options = {
620
copy_old_wiki: false,
621
wiki_pull: false
622
}
623
options_parser = OptionParser.new do |opts|
624
opts.banner = "Usage: #{File.basename(__FILE__)} [options]"
625
626
opts.on '-h', '--help', 'Help banner.' do
627
return print(opts.help)
628
end
629
630
opts.on('--production', 'Run a production build') do |production|
631
options[:production] = production
632
end
633
634
opts.on('--staging', 'Run a staging build for deploying to gh-pages') do |staging|
635
options[:staging] = staging
636
end
637
638
opts.on('--serve', 'serve the docs site') do |serve|
639
options[:serve] = serve
640
end
641
642
opts.on('--[no]-copy-old-wiki [FLAG]', TrueClass, 'Copy the content from the old wiki to the new local wiki folder') do |copy_old_wiki|
643
options[:copy_old_wiki] = copy_old_wiki
644
end
645
646
opts.on('--[no-]-wiki-pull', FalseClass, 'Pull the Metasploit Wiki') do |wiki_pull|
647
options[:wiki_pull] = wiki_pull
648
end
649
650
opts.on('--update-wiki-deprecation-notice [WEBSITE_URL]', 'Updates the old wiki deprecation notes') do |new_website_url|
651
new_website_url ||= 'https://docs.metasploit.com/'
652
options[:update_wiki_deprecation_notice] = {
653
new_website_url: new_website_url
654
}
655
end
656
657
opts.on('--create-wiki-to-framework-migration-branch') do
658
options[:create_wiki_to_framework_migration_branch] = true
659
end
660
end
661
if ARGV.length == 0
662
puts options_parser.help
663
exit 1
664
end
665
options_parser.parse!
666
667
Build.run(options)
668
end
669
670