Compare commits

...

No commits in common. "main" and "master" have entirely different histories.
main ... master

179 changed files with 11511 additions and 308 deletions

3
.idea/.gitignore vendored Normal file

@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml


@@ -0,0 +1,68 @@
<component name="libraryTable">
<library name="groovy-4.0.2" type="groovy-sdk">
<properties version="4.0.2" />
<CLASSES>
<root url="jar://$PROJECT_DIR$/lib/groovy-dateutil-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-ant-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-cli-picocli-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-groovydoc-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-cli-commons-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-contracts-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-docgenerator-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-astbuilder-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-macro-library-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-ginq-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-jmx-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-servlet-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-json-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-macro-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-sql-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-test-junit5-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-test-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-testng-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-templates-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-groovysh-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-typecheckers-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-swing-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-xml-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-jsr223-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-datetime-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-console-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-nio-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-toml-4.0.2.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-4.0.2.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES>
<root url="jar://$PROJECT_DIR$/lib/groovy-astbuilder-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-ant-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-cli-commons-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-cli-picocli-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-datetime-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-dateutil-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-docgenerator-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-console-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-ginq-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-contracts-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-jmx-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-jsr223-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-groovydoc-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-macro-library-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-macro-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-json-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-groovysh-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-nio-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-servlet-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-sql-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-testng-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-test-junit5-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-templates-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-toml-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-test-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-swing-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-typecheckers-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-xml-4.0.2-sources.jar!/" />
<root url="jar://$PROJECT_DIR$/lib/groovy-4.0.2-sources.jar!/" />
</SOURCES>
</library>
</component>

6
.idea/misc.xml Normal file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" languageLevel="JDK_19" default="true" project-jdk-name="19" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
</project>

8
.idea/modules.xml Normal file

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
</modules>
</component>
</project>

124
.idea/uiDesigner.xml Normal file

@@ -0,0 +1,124 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Palette2">
<group name="Swing">
<item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
</item>
<item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
</item>
<item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
</item>
<item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.svg" removable="false" auto-create-binding="false" can-attach-label="true">
<default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
</item>
<item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
<initial-values>
<property name="text" value="Button" />
</initial-values>
</item>
<item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
<initial-values>
<property name="text" value="RadioButton" />
</initial-values>
</item>
<item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
<initial-values>
<property name="text" value="CheckBox" />
</initial-values>
</item>
<item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
<initial-values>
<property name="text" value="Label" />
</initial-values>
</item>
<item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
<preferred-size width="150" height="-1" />
</default-constraints>
</item>
<item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
<preferred-size width="150" height="-1" />
</default-constraints>
</item>
<item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
<preferred-size width="150" height="-1" />
</default-constraints>
</item>
<item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
</item>
<item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
<preferred-size width="200" height="200" />
</default-constraints>
</item>
<item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
<preferred-size width="200" height="200" />
</default-constraints>
</item>
<item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.svg" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
</item>
<item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
</item>
<item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
</item>
<item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
</item>
<item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
<preferred-size width="-1" height="20" />
</default-constraints>
</item>
<item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.svg" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
</item>
<item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.svg" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
</item>
</group>
</component>
</project>

6
.idea/vcs.xml Normal file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>


@@ -1 +0,0 @@
Druhe zadanie je v branchi master ("The second assignment is in the master branch")


@@ -1,32 +0,0 @@
package company;
import company.space.hmlovina;
import company.space.vesmir;
import company.vesminetelesa.*;
import java.awt.desktop.SystemEventListener;
public class Main {
    public static void main(String[] args) {
        vesmir mojprvyvesmir = new vesmir();
        mojprvyvesmir.startVesmir();
        hmlovina MiroSmajda = new hmlovina();
        hmlovina PetoCmorik = new hmlovina();
        hmlovina KatkaKnechtova = new hmlovina();
        Planeta X = new Planeta("X",5972200,12756);
        X.setPlanetaryWeight(-2000000);
        Hviezda Slnko = new Hviezda("Slnko",696340,4603);
        Slnecna_sustava Solar = new Slnecna_sustava();
        Solar.addTeleso(X);
        Galaxia Mliecna_Cesta = new Galaxia();
        Mliecna_Cesta.addSustava(Solar);
        Vesmirne_telesa Cierna_diera = new Vesmirne_telesa();
        Mliecna_Cesta.addTeleso(Cierna_diera);
        System.out.printf("Typ telesa: %s",Cierna_diera.getType());
        Galaxia Hudak = new Galaxia();
    }
}

Binary file not shown.

BIN
lib/groovy-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-ant-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-ginq-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-jmx-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-json-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-jsr223-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-macro-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-nio-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-sql-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-swing-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-test-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-testng-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-toml-4.0.2.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/groovy-xml-4.0.2.jar Normal file

Binary file not shown.

23
node_modules/.package-lock.json generated vendored Normal file

@@ -0,0 +1,23 @@
{
"name": "test",
"lockfileVersion": 2,
"requires": true,
"packages": {
"node_modules/git": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/git/-/git-0.1.5.tgz",
"integrity": "sha512-N+bfOrXyKMU/fQtCj6D/U9MQOEN0DAA8TLHSLdUQRSWBOkeRvsjJHdrdkvcq05xO7GSDKWc3nDEGoTZ4DfCCSg==",
"dependencies": {
"mime": "1.2.9"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/mime": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz",
"integrity": "sha512-WiLgbHTIq5AYUvU/Luli4mZ1bUcHpGNHyCsbl+KPMg4zt+XUDpQehWjuBjdLaEvDTinvKj/FgfQt3fPoT7j08g=="
}
}
}

2
node_modules/git/.npmignore generated vendored Normal file

@@ -0,0 +1,2 @@
makefile
test/

4
node_modules/git/.travis.yml generated vendored Normal file

@@ -0,0 +1,4 @@
language: node_js
node_js:
- '0.10'
- '0.11'

48
node_modules/git/README.md generated vendored Normal file

@@ -0,0 +1,48 @@
[![build status](https://secure.travis-ci.org/christkv/node-git.png)](http://travis-ci.org/christkv/node-git)
# Introduction
This is a library for Git written in Node.js. It's as close a port of grit (http://github.com/mojombo/grit) as possible.
The idea is to allow a node.js application to manipulate git repositories. Not everything is
implemented directly in node-git; some functionality shells out to the native git command line instead of
using direct javascript code. It's also fairly synchronous right now, but that will hopefully change little
by little over time as it gets more stable and I start using it in real-life scenarios.
## Github information
The source code is available at http://github.com/christkv/node-git.
You can either clone the repository or download a tarball of the latest release.
Once you have the source you can test the driver by running
$ make test
On Windows:
PS > node.exe .\node_modules\nodeunit\bin\nodeunit .\test
## Examples
For simple examples of usage look at the tests included in the repository.
## Notes
The current version provides only basic git support; don't expect everything to work as you'd expect
right off the bat.
## License
Copyright 2009 - 2010 Christian Amor Kvalheim.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
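For orientation, here is a minimal sketch of driving the library, based on the calls exercised in benchmarks/benchmark.js further down. It is illustrative only: the repository path is a placeholder, and the require path is copied from benchmark.js and may need adjusting depending on how the package exposes its modules.
var Repo = require('git/repo').Repo,
    util = require('util');
// Open a bare repository; the path below is purely illustrative
new Repo('/tmp/example/dot_git', {is_bare: true}, function(err, repo) {
  if (err) return util.puts('open error: ' + err);
  // Fetch the 15 most recent commits on master and print their messages
  repo.commits('master', 15, function(err, commits) {
    if (err) return util.puts('commits error: ' + err);
    commits.forEach(function(c) { util.puts(c.message); });
  });
});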

150
node_modules/git/benchmarks/benchmark.js generated vendored Normal file

@@ -0,0 +1,150 @@
var Repo = require('git/repo').Repo,
util = require('util'),
fs = require('fs'),
exec = require('child_process').exec;
var number_of_executions = 30;
var create_tmp_directory = function(clone_path, callback) {
var filename = 'git_test' + new Date().getTime().toString() + Math.round((Math.random(100000) * 300)).toString();
var tmp_path = '/tmp/' + filename;
// Create directory
fs.mkdirSync(tmp_path, 0777);
// Copy the old directory to the new one
var child = exec('cp -R ' + clone_path + ' ' + tmp_path, function (error, stdout, stderr) {
if (error !== null) {
util.puts('exec error: ' + error);
return callback(error, null);
}
return callback(null, tmp_path);
});
}
var destroy_directory = function(directory, callback) {
// Copy the old directory to the new one
var child = exec('rm -rf ' + directory, function (error, stdout, stderr) {
if (error !== null) {
util.puts('exec error: ' + error);
return callback(error, null);
}
return callback(null, null);
});
}
var commit1 = '5e3ee1198672257164ce3fe31dea3e40848e68d5'
var commit2 = 'ca8a30f5a7f0f163bbe3b6f0abf18a6c83b0687a'
var pack_object_function = function(repo) {
repo.commit('5e3ee1198672257164ce3fe31dea3e40848e68d5', function(err, commit) {
repo.tree('cd7422af5a2e0fff3e94d6fb1a8fff03b2841881', function(err, tree) {
repo.blob('4232d073306f01cf0b895864e5a5cfad7dd76fce', function(err, blob) {
commit.parents[0].parents[0].parents[0];
})
})
});
}
var commits1_function = function(repo) {
repo.commits(function(err, commits) {
commits.length;
})
}
var commits2_function = function(repo) {
repo.commits('master', 15, function(err, log) {
log.length;
log.length;
log[0];
repo.commits('testing', function(err, commits) {
commits.map(function(c) { return c.message; });
})
})
}
var big_revlist_function = function(repo) {
repo.commits('master', 200, function(err, commits) {});
}
var log_function = function(repo) {
repo.log('master', function(err, log) {
log.length;
log.length;
log[0];
})
}
var diff_function = function(repo) {
repo.diff(commit1, commit2, function(err, diff) {});
}
var commit_diff_function = function(repo) {
repo.commit_diff(commit1, function(err, diff) {});
}
var heads_function = function(repo) {
repo.heads(function(err, heads) {
heads.map(function(b) { return b.commit.id; });
});
}
var execute_process = function(type) {
var execute_function = null;
// Check that we have the right function
if(type == "packobj") {
execute_function = pack_object_function;
} else if(type == "commits1") {
execute_function = commits1_function;
} else if(type == "commits2") {
execute_function = commits2_function;
} else if(type == "big_revlist") {
execute_function = big_revlist_function;
} else if(type == "log") {
execute_function = log_function;
} else if(type == "diff") {
execute_function = diff_function;
} else if(type == "commit_diff") {
execute_function = commit_diff_function;
} else if(type == "heads") {
execute_function = heads_function;
}
// Ensure that we have an executable function
if(execute_function) {
// Create temp directory
create_tmp_directory("/Users/christian.kvalheim/coding/checkouts/grit/test/dot_git", function(err, target_path) {
// Open the repo
new Repo(target_path + "/dot_git", {is_bare:true}, function(err, repo) {
var start_time = new Date();
// Execute the benchmark x number of times if a function is defined
for(var i = 0; i < number_of_executions; i++) {
execute_function(repo);
}
var end_time = new Date();
var total_miliseconds = end_time.getTime() - start_time.getTime();
util.puts("[" + type + "]::executed in: " + (total_miliseconds/1000) + " seconds");
// Delete the directory
destroy_directory(target_path, function(err, result) {});
});
});
}
}
if(process.argv.length > 2 && process.argv[2].match(/packobj|commits1|commits2|big_revlist|log|diff|commit_diff|heads|all/)) {
if(process.argv[2] == "all") {
var tests = ["packobj", "commits1", "commits2", "big_revlist", "log", "diff", "commit_diff", "heads"];
// var tests = ["packobj", "commits1", "commits2", "big_revlist", "log"];
tests.forEach(function(t) {
execute_process(t);
})
} else {
execute_process(process.argv[2]);
}
} else {
util.puts("Please provide the benchmark you wish to run in the form <node benchmarks [packobj|commits1|commits2|big_revlist|log|diff|commit_diff|heads|all]>")
}

33
node_modules/git/lib/diff/block.js generated vendored Normal file

@@ -0,0 +1,33 @@
// A block is an operation removing, adding, or changing a group of items.
// Basically, this is just a list of changes, where each change adds or
// deletes a single item. Used by bin/ldiff.
var Block = exports.Block = function(chunk) {
this.changes = [];
this.insert = [];
this.remove = [];
var self = this;
chunk.forEach(function(item) {
self.changes.push(item);
if(item.is_deleting()) self.remove.push(item);
if(item.is_adding()) self.insert.push(item);
})
Object.defineProperty(this, "diff_size", { get: function() { return self.insert.length - self.remove.length; }, enumerable: true});
Object.defineProperty(this, "op", { get: function() {
var result = [self.remove.length == 0, self.insert.length == 0];
if(!result[0] && !result[1]) {
return "!";
} else if(!result[0] && result[1]) {
return "-";
} else if(result[0] && result[1]) {
return "+";
} else {
return "^";
}
}, enumerable: true});
}
Block.prototype.op = function() {
}
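To make the Block class above concrete, here is a small illustrative sketch (not taken from the repository). It builds a block from two hand-made Change objects, using the Change constructor defined in lib/diff/change.js further down; the require paths assume this node_modules layout.
var Block = require('git/lib/diff/block').Block,
    Change = require('git/lib/diff/change').Change;
// A chunk that removes 'a' at position 0 and adds 'd' at position 2
var chunk = [new Change('-', 0, 'a'), new Change('+', 2, 'd')];
var block = new Block(chunk);
console.log(block.remove.length); // 1 -- the '-' change
console.log(block.insert.length); // 1 -- the '+' change
console.log(block.diff_size);     // 0 -- insert.length minus remove.length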

311
node_modules/git/lib/diff/callbacks.js generated vendored Normal file

@@ -0,0 +1,311 @@
var ContextChange = require('./change').ContextChange,
Change = require('./change').Change;
// This callback object implements the default set of callback events, which
// only returns the event itself. Note that //finished_a and //finished_b are
// not implemented -- I haven't yet figured out where they would be useful.
//
// Note that this is intended to be called as is, e.g.,
DefaultCallbacks = exports.DefaultCallbacks = function() {
}
// Called when two items match.
DefaultCallbacks.prototype.match = function(event) {
return event;
}
// Called when the old value is discarded in favour of the new value.
DefaultCallbacks.prototype.discard_a = function(event) {
return event;
}
// Called when the new value is discarded in favour of the old value.
DefaultCallbacks.prototype.discard_b = function(event) {
return event;
}
// Called when both the old and new values have changed.
DefaultCallbacks.prototype.change = function(event) {
return event;
}
// An alias for DefaultCallbacks that is used in Diff::LCS#traverse_sequences.
SequenceCallbacks = exports.SequenceCallbacks = DefaultCallbacks;
// An alias for DefaultCallbacks that is used in Diff::LCS#traverse_balanced.
BalancedCallbacks = exports.BalancedCallbacks = DefaultCallbacks;
// This will produce a compound array of simple diff change objects. Each
// element in the //diffs array is a +hunk+ or +hunk+ array, where each
// element in each +hunk+ array is a single Change object representing the
// addition or removal of a single element from one of the two tested
// sequences. The +hunk+ provides the full context for the changes.
//
// diffs = Diff::LCS.diff(seq1, seq2)
// // This example shows a simplified array format.
// // [ [ [ '-', 0, 'a' ] ], // 1
// // [ [ '+', 2, 'd' ] ], // 2
// // [ [ '-', 4, 'h' ], // 3
// // [ '+', 4, 'f' ] ],
// // [ [ '+', 6, 'k' ] ], // 4
// // [ [ '-', 8, 'n' ], // 5
// // [ '-', 9, 'p' ],
// // [ '+', 9, 'r' ],
// // [ '+', 10, 's' ],
// // [ '+', 11, 't' ] ] ]
//
// There are five hunks here. The first hunk says that the +a+ at position 0
// of the first sequence should be deleted (<tt>'-'</tt>). The second hunk
// says that the +d+ at position 2 of the second sequence should be inserted
// (<tt>'+'</tt>). The third hunk says that the +h+ at position 4 of the
// first sequence should be removed and replaced with the +f+ from position 4
// of the second sequence. The other two hunks are described similarly.
//
// === Use
// This callback object must be initialised and is used by the Diff::LCS//diff
// method.
//
// cbo = Diff::LCS::DiffCallbacks.new
// Diff::LCS.LCS(seq1, seq2, cbo)
// cbo.finish
//
// Note that the call to //finish is absolutely necessary, or the last set of
// changes will not be visible. Alternatively, can be used as:
//
// cbo = Diff::LCS::DiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) }
//
// The necessary //finish call will be made.
//
// === Simplified Array Format
// The simplified array format used in the example above can be obtained
// with:
//
// require 'pp'
// pp diffs.map { |e| e.map { |f| f.to_a } }
DiffCallbacks = exports.DiffCallbacks = function(block) {
this.hunk = [];
this.diffs = [];
if(block != null) {
block(this);
this.finish();
}
}
// Finalizes the diff process. If an unprocessed hunk still exists, then it
// is appended to the diff list.
DiffCallbacks.prototype.finish = function() {
add_nonempty_hunk(this);
}
DiffCallbacks.prototype.match = function(event) {
add_nonempty_hunk(this);
}
DiffCallbacks.prototype.discard_a = function(event) {
this.hunk.push(new Change('-', event.old_position, event.old_element));
}
DiffCallbacks.prototype.discard_b = function(event) {
this.hunk.push(new Change('+', event.new_position, event.new_element));
}
var add_nonempty_hunk = function(diff_callback) {
if(diff_callback.hunk.length > 0) diff_callback.diffs.push(diff_callback.hunk);
diff_callback.hunk = [];
}
// This will produce a simple array of diff change objects. Each element in
// the //diffs array is a single ContextChange. In the set of //diffs provided
// by SDiffCallbacks, both old and new objects will be presented for both
// changed <strong>and unchanged</strong> objects. +nil+ will be substituted
// for a discarded object.
//
// The diffset produced by this callback, when provided to Diff::LCS//sdiff,
// will compute and display the necessary components to show two sequences
// and their minimized differences side by side, just like the Unix utility
// +sdiff+.
//
// same same
// before | after
// old < -
// - > new
//
// seq1 = %w(a b c e h j l m n p)
// seq2 = %w(b c d e f j k l m r s t)
//
// diffs = Diff::LCS.sdiff(seq1, seq2)
// // This example shows a simplified array format.
// // [ [ "-", [ 0, "a"], [ 0, nil ] ],
// // [ "=", [ 1, "b"], [ 0, "b" ] ],
// // [ "=", [ 2, "c"], [ 1, "c" ] ],
// // [ "+", [ 3, nil], [ 2, "d" ] ],
// // [ "=", [ 3, "e"], [ 3, "e" ] ],
// // [ "!", [ 4, "h"], [ 4, "f" ] ],
// // [ "=", [ 5, "j"], [ 5, "j" ] ],
// // [ "+", [ 6, nil], [ 6, "k" ] ],
// // [ "=", [ 6, "l"], [ 7, "l" ] ],
// // [ "=", [ 7, "m"], [ 8, "m" ] ],
// // [ "!", [ 8, "n"], [ 9, "r" ] ],
// // [ "!", [ 9, "p"], [ 10, "s" ] ],
// // [ "+", [ 10, nil], [ 11, "t" ] ] ]
//
// The result of this operation is similar to that of
// Diff::LCS::ContextDiffCallbacks. They may be compared as:
//
// s = Diff::LCS.sdiff(seq1, seq2).reject { |e| e.action == "=" }
// c = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextDiffCallbacks).flatten
//
// s == c // -> true
//
// === Use
// This callback object must be initialised and is used by the Diff::LCS//sdiff
// method.
//
// cbo = Diff::LCS::SDiffCallbacks.new
// Diff::LCS.LCS(seq1, seq2, cbo)
//
// As with the other initialisable callback objects, Diff::LCS::SDiffCallbacks
// can be initialised with a block. As there is no "fininishing" to be done,
// this has no effect on the state of the object.
//
// cbo = Diff::LCS::SDiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) }
//
// === Simplified Array Format
// The simplified array format used in the example above can be obtained
// with:
//
// require 'pp'
// pp diffs.map { |e| e.to_a }
SDiffCallbacks = exports.SDiffCallbacks = function(block) {
this.diffs = [];
if(block != null) {
block(this);
this.finish();
}
}
SDiffCallbacks.prototype.match = function(event) {
this.diffs.push(ContextChange.simplify(event));
}
SDiffCallbacks.prototype.discard_a = function(event) {
this.diffs.push(ContextChange.simplify(event));
}
SDiffCallbacks.prototype.discard_b = function(event) {
this.diffs.push(ContextChange.simplify(event));
}
SDiffCallbacks.prototype.change = function(event) {
this.diffs.push(ContextChange.simplify(event));
}
// This will produce a compound array of contextual diff change objects. Each
// element in the //diffs array is a "hunk" array, where each element in each
// "hunk" array is a single change. Each change is a Diff::LCS::ContextChange
// that contains both the old index and new index values for the change. The
// "hunk" provides the full context for the changes. Both old and new objects
// will be presented for changed objects. +nil+ will be substituted for a
// discarded object.
//
// seq1 = %w(a b c e h j l m n p)
// seq2 = %w(b c d e f j k l m r s t)
//
// diffs = Diff::LCS.diff(seq1, seq2, Diff::LCS::ContextDiffCallbacks)
// // This example shows a simplified array format.
// // [ [ [ '-', [ 0, 'a' ], [ 0, nil ] ] ], // 1
// // [ [ '+', [ 3, nil ], [ 2, 'd' ] ] ], // 2
// // [ [ '-', [ 4, 'h' ], [ 4, nil ] ], // 3
// // [ '+', [ 5, nil ], [ 4, 'f' ] ] ],
// // [ [ '+', [ 6, nil ], [ 6, 'k' ] ] ], // 4
// // [ [ '-', [ 8, 'n' ], [ 9, nil ] ], // 5
// // [ '+', [ 9, nil ], [ 9, 'r' ] ],
// // [ '-', [ 9, 'p' ], [ 10, nil ] ],
// // [ '+', [ 10, nil ], [ 10, 's' ] ],
// // [ '+', [ 10, nil ], [ 11, 't' ] ] ] ]
//
// The five hunks shown are comprised of individual changes; if there is a
// related set of changes, they are still shown individually.
//
// This callback can also be used with Diff::LCS//sdiff, which will produce
// results like:
//
// diffs = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextCallbacks)
// // This example shows a simplified array format.
// // [ [ [ "-", [ 0, "a" ], [ 0, nil ] ] ], // 1
// // [ [ "+", [ 3, nil ], [ 2, "d" ] ] ], // 2
// // [ [ "!", [ 4, "h" ], [ 4, "f" ] ] ], // 3
// // [ [ "+", [ 6, nil ], [ 6, "k" ] ] ], // 4
// // [ [ "!", [ 8, "n" ], [ 9, "r" ] ], // 5
// // [ "!", [ 9, "p" ], [ 10, "s" ] ],
// // [ "+", [ 10, nil ], [ 11, "t" ] ] ] ]
//
// The five hunks are still present, but are significantly shorter in total
// presentation, because changed items are shown as changes ("!") instead of
// potentially "mismatched" pairs of additions and deletions.
//
// The result of this operation is similar to that of
// Diff::LCS::SDiffCallbacks. They may be compared as:
//
// s = Diff::LCS.sdiff(seq1, seq2).reject { |e| e.action == "=" }
// c = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextDiffCallbacks).flatten
//
// s == c // -> true
//
// === Use
// This callback object must be initialised and can be used by the
// Diff::LCS//diff or Diff::LCS//sdiff methods.
//
// cbo = Diff::LCS::ContextDiffCallbacks.new
// Diff::LCS.LCS(seq1, seq2, cbo)
// cbo.finish
//
// Note that the call to //finish is absolutely necessary, or the last set of
// changes will not be visible. Alternatively, can be used as:
//
// cbo = Diff::LCS::ContextDiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) }
//
// The necessary //finish call will be made.
//
// === Simplified Array Format
// The simplified array format used in the example above can be obtained
// with:
//
// require 'pp'
// pp diffs.map { |e| e.map { |f| f.to_a } }
ContextDiffCallbacks = exports.ContextDiffCallbacks = function(block) {
this.hunk = [];
this.diffs = [];
if(block != null) {
block(this);
this.finish();
}
}
ContextDiffCallbacks.prototype.finish = function() {
add_nonempty_hunk(this);
}
ContextDiffCallbacks.prototype.discard_a = function(event) {
this.hunk.push(ContextChange.simplify(event));
}
ContextDiffCallbacks.prototype.discard_b = function(event) {
this.hunk.push(ContextChange.simplify(event));
}
ContextDiffCallbacks.prototype.match = function(event) {
this.hunk.push(ContextChange.simplify(event));
}
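The Ruby-flavoured comments above describe how these callback objects are driven; in this port the equivalent flow is roughly the sketch below (illustrative only). It relies on Difference.LCS.diff from lib/diff/diff.js further down, which installs DiffCallbacks by default and calls finish() itself.
var Difference = require('git/lib/diff/diff').Difference;
var seq1 = ['a', 'b', 'c', 'e', 'h', 'j', 'l', 'm', 'n', 'p'];
var seq2 = ['b', 'c', 'd', 'e', 'f', 'j', 'k', 'l', 'm', 'r', 's', 't'];
// With the default DiffCallbacks the result is an array of hunks,
// each hunk being a list of Change objects
var hunks = Difference.LCS.diff(seq1, seq2);
hunks.forEach(function(hunk) {
  console.log(hunk.map(function(change) { return change.to_a(); }));
});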

105
node_modules/git/lib/diff/change.js generated vendored Normal file

@@ -0,0 +1,105 @@
// Represents a simplistic (non-contextual) change. Represents the removal or
// addition of an element from either the old or the new sequenced enumerable.
var Change = exports.Change = function(action, position, element) {
this.action = action;
this.position = position;
this.element = element;
}
Change.from_a = function(arr) {
return new Change(arr[0], arr[1], arr[2]);
}
Change.prototype.to_a = function() {
return [this.action, this.position, this.element];
}
Change.prototype.is_deleting = function() {
return this.action == '-';
}
Change.prototype.is_adding = function() {
return this.action == '+';
}
Change.prototype.is_unchanged = function() {
return this.action == '=';
}
Change.prototype.is_changed = function() {
return this.changed == '!';
}
Change.prototype.is_finished_a = function() {
return this.changed == '>';
}
Change.prototype.is_finished_b = function() {
return this.changed == '<';
}
var ContextChange = exports.ContextChange = function(action, old_position, old_element, new_position, new_element) {
this.action = action;
this.old_position = old_position;
this.old_element = old_element;
this.new_position = new_position;
this.new_element = new_element;
}
// Creates a ContextChange from an array produced by ContextChange#to_a.
ContextChange.from_a = function(arr) {
if(arr.length == 5) {
return new ContextChange(arr[0], arr[1], arr[2], arr[3], arr[4]);
} else {
return new ContextChange(arr[0], arr[1][0], arr[1][1], arr[2][0], arr[2][1]);
}
}
// Simplifies a context change for use in some diff callbacks. '<' actions
// are converted to '-' and '>' actions are converted to '+'.
ContextChange.simplify = function(event) {
var ea = event.to_a();
if(ea[0] == '-') {
ea[2][1] = null;
} else if(ea[0] == '<') {
ea[0] = '-';
ea[2][1] = null;
} else if(ea[0] == '+') {
ea[1][1] = null;
} else if(ea[0] == '>') {
ea[0] = '+';
ea[1][1] = null;
}
// Return a Context Change object
return ContextChange.from_a(ea);
}
ContextChange.prototype.to_a = function() {
return [this.action, [this.old_position, this.old_element], [this.new_position, this.new_element]];
}
ContextChange.prototype.is_deleting = function() {
return this.action == '-';
}
ContextChange.prototype.is_adding = function() {
return this.action == '+';
}
ContextChange.prototype.is_unchanged = function() {
return this.action == '=';
}
ContextChange.prototype.is_changed = function() {
return this.changed == '!';
}
ContextChange.prototype.is_finished_a = function() {
return this.changed == '>';
}
ContextChange.prototype.is_finished_b = function() {
return this.changed == '<';
}
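As a concrete illustration of simplify() above (not from the repository): a '>' end-of-sequence event becomes a plain '+' change with the old element dropped, which is the behaviour relied on by the SDiff callbacks.
var ContextChange = require('git/lib/diff/change').ContextChange;
// An end-of-sequence event: old side at position 9 ('p'), new side at position 4 ('f')
var event = new ContextChange('>', 9, 'p', 4, 'f');
var simple = ContextChange.simplify(event);
console.log(simple.action); // '+'
console.log(simple.to_a()); // [ '+', [ 9, null ], [ 4, 'f' ] ]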

892
node_modules/git/lib/diff/diff.js generated vendored Normal file

@@ -0,0 +1,892 @@
var util = require('util'),
SequenceCallbacks = require('./callbacks').SequenceCallbacks,
ContextChange = require('./change').ContextChange,
Change = require('./change').Change,
DiffCallbacks = require('./callbacks').DiffCallbacks,
SDiffCallbacks = require('./callbacks').SDiffCallbacks,
BalancedCallbacks = require('./callbacks').BalancedCallbacks,
ContextDiffCallbacks = require('./callbacks').ContextDiffCallbacks,
Hunk = require('./hunk').Hunk;
var Difference = exports.Difference = function() {
}
Difference.LCS = function() {
}
// Scope the Sequence Callbacks class
Difference.LCS.SequenceCallbacks = SequenceCallbacks;
Difference.LCS.ContextChange = ContextChange;
Difference.LCS.DiffCallbacks = DiffCallbacks;
Difference.LCS.SDiffCallbacks = SDiffCallbacks;
Difference.LCS.BalancedCallbacks = BalancedCallbacks;
Difference.LCS.ContextDiffCallbacks = ContextDiffCallbacks;
Difference.LCS.Change = Change;
Difference.LCS.Hunk = Hunk;
// Diff::LCS.sdiff computes all necessary components to show two sequences
// and their minimized differences side by side, just like the Unix
// utility <em>sdiff</em> does:
//
// old < -
// same same
// before | after
// - > new
//
// See Diff::LCS::SDiffCallbacks for the default behaviour. An alternate
// behaviour may be implemented with Diff::LCS::ContextDiffCallbacks. If
// a Class argument is provided for +callbacks+, //diff will attempt to
// initialise it. If the +callbacks+ object (possibly initialised)
// responds to //finish, it will be called.
Difference.LCS.sdiff = function(seq1, seq2, callbacks, block) {
callbacks = callbacks != null ? callbacks : Difference.LCS.SDiffCallbacks;
if(Object.prototype.toString.call(callbacks) == "[object Function]") {
callbacks = new callbacks();
}
// Traverse the sequence
Difference.LCS.traverse_balanced(seq1, seq2, callbacks);
if(callbacks.finish != null) callbacks.finish();
if(block != null) {
var res = callbacks.diffs.map(function(hunk) {
if(Array.isArray(hunk)) {
hunk = hunk.map(function(v) { return block(v); });
} else {
block(hunk);
}
});
return res;
} else {
return callbacks.diffs;
}
}
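// Illustrative usage (not part of the original file): with the default
// SDiffCallbacks, sdiff yields one ContextChange per aligned pair, including
// unchanged ('=') elements. For example:
//
//   var sdiffs = Difference.LCS.sdiff(['a', 'b', 'c'], ['b', 'c', 'd']);
//   sdiffs.map(function(c) { return c.to_a(); });
//   // => [ [ '-', [ 0, 'a' ], [ 0, null ] ],
//   //      [ '=', [ 1, 'b' ], [ 0, 'b' ] ],
//   //      [ '=', [ 2, 'c' ], [ 1, 'c' ] ],
//   //      [ '+', [ 3, null ], [ 2, 'd' ] ] ]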
// Diff::LCS.diff computes the smallest set of additions and deletions
// necessary to turn the first sequence into the second, and returns a
// description of these changes.
//
// See Diff::LCS::DiffCallbacks for the default behaviour. An alternate
// behaviour may be implemented with Diff::LCS::ContextDiffCallbacks.
// If a Class argument is provided for +callbacks+, //diff will attempt
// to initialise it. If the +callbacks+ object (possibly initialised)
// responds to //finish, it will be called.
Difference.LCS.diff = function(seq1, seq2, callbacks, block) {
callbacks = callbacks != null ? callbacks : Difference.LCS.DiffCallbacks;
if(Object.prototype.toString.call(callbacks) == "[object Function]") {
callbacks = new callbacks();
}
// Traverse the sequence
Difference.LCS.traverse_sequences(seq1, seq2, callbacks);
if(callbacks.finish != null) callbacks.finish();
if(block != null) {
var res = callbacks.diffs.map(function(hunk) {
if(Array.isArray(hunk)) {
hunk = hunk.map(function(v) { return block(v); });
} else {
block(hunk);
}
});
return res;
} else {
return callbacks.diffs;
}
}
// Diff::LCS.traverse_sequences is the most general facility provided by this
// module; +diff+ and +LCS+ are implemented as calls to it.
//
// The arguments to //traverse_sequences are the two sequences to
// traverse, and a callback object, like this:
//
// traverse_sequences(seq1, seq2, Diff::LCS::ContextDiffCallbacks.new)
//
// //diff is implemented with //traverse_sequences.
//
// == Callback Methods
// Optional callback methods are <em>emphasized</em>.
//
// callbacks//match:: Called when +a+ and +b+ are pointing
// to common elements in +A+ and +B+.
// callbacks//discard_a:: Called when +a+ is pointing to an
// element not in +B+.
// callbacks//discard_b:: Called when +b+ is pointing to an
// element not in +A+.
// <em>callbacks//finished_a</em>:: Called when +a+ has reached the end of
// sequence +A+.
// <em>callbacks//finished_b</em>:: Called when +b+ has reached the end of
// sequence +B+.
//
// == Algorithm
// a---+
// v
// A = a b c e h j l m n p
// B = b c d e f j k l m r s t
// ^
// b---+
//
// If there are two arrows (+a+ and +b+) pointing to elements of
// sequences +A+ and +B+, the arrows will initially point to the first
// elements of their respective sequences. //traverse_sequences will
// advance the arrows through the sequences one element at a time,
// calling a method on the user-specified callback object before each
// advance. It will advance the arrows in such a way that if there are
// elements <tt>A[ii]</tt> and <tt>B[jj]</tt> which are both equal and
// part of the longest common subsequence, there will be some moment
// during the execution of //traverse_sequences when arrow +a+ is pointing
// to <tt>A[ii]</tt> and arrow +b+ is pointing to <tt>B[jj]</tt>. When
// this happens, //traverse_sequences will call <tt>callbacks//match</tt>
// and then it will advance both arrows.
//
// Otherwise, one of the arrows is pointing to an element of its sequence
// that is not part of the longest common subsequence.
// //traverse_sequences will advance that arrow and will call
// <tt>callbacks//discard_a</tt> or <tt>callbacks//discard_b</tt>, depending
// on which arrow it advanced. If both arrows point to elements that are
// not part of the longest common subsequence, then //traverse_sequences
// will advance one of them and call the appropriate callback, but it is
// not specified which it will call.
//
// The methods for <tt>callbacks//match</tt>, <tt>callbacks//discard_a</tt>,
// and <tt>callbacks//discard_b</tt> are invoked with an event comprising
// the action ("=", "+", or "-", respectively), the indicies +ii+ and
// +jj+, and the elements <tt>A[ii]</tt> and <tt>B[jj]</tt>. Return
// values are discarded by //traverse_sequences.
//
// === End of Sequences
// If arrow +a+ reaches the end of its sequence before arrow +b+ does,
// //traverse_sequences will try to call <tt>callbacks//finished_a</tt> with the
// last index and element of +A+ (<tt>A[-1]</tt>) and the current index
// and element of +B+ (<tt>B[jj]</tt>). If <tt>callbacks//finished_a</tt>
// does not exist, then <tt>callbacks//discard_b</tt> will be called on
// each element of +B+ until the end of the sequence is reached (the call
// will be done with <tt>A[-1]</tt> and <tt>B[jj]</tt> for each element).
//
// If +b+ reaches the end of +B+ before +a+ reaches the end of +A+,
// <tt>callbacks//finished_b</tt> will be called with the current index
// and element of +A+ (<tt>A[ii]</tt>) and the last index and element of
// +B+ (<tt>A[-1]</tt>). Again, if <tt>callbacks//finished_b</tt> does not
// exist on the callback object, then <tt>callbacks//discard_a</tt> will
// be called on each element of +A+ until the end of the sequence is
// reached (<tt>A[ii]</tt> and <tt>B[-1]</tt>).
//
// There is a chance that one additional <tt>callbacks//discard_a</tt> or
// <tt>callbacks//discard_b</tt> will be called after the end of the
// sequence is reached, if +a+ has not yet reached the end of +A+ or +b+
// has not yet reached the end of +B+.
Difference.LCS.traverse_sequences = function(seq1, seq2, callbacks, block) { // The block allows callbacks on change events
// Ensure that we have at least a default callback object
callbacks = callbacks != null ? callbacks : new Difference.LCS.SequenceCallbacks();
// Fetch the matches from the __lcs algorithm
var matches = Difference.LCS.__lcs(seq1, seq2);
var run_finished_a = false, run_finished_b = false;
var string = seq1.constructor == String;
var a_size = seq1.length, b_size = seq2.length;
var ai = 0, bj = 0;
var event = null;
for(var ii = 0; ii <= matches.length; ii++) {
var b_line = matches[ii];
var ax = string ? seq1.substr(ii, 1) : seq1[ii];
var bx = string ? seq2.substr(bj, bj + 1) : seq2[bj];
if(b_line == null) {
if(ax != null) {
event = new Difference.LCS.ContextChange('-', ii, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_a(event);
}
} else {
while(bj < b_line) {
bx = string ? seq2.substr(bj, 1) : seq2[bj];
event = new Difference.LCS.ContextChange('+', ii, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_b(event);
bj = bj + 1;
}
bx = string ? seq2.substr(bj, 1) : seq2[bj];
event = new Difference.LCS.ContextChange('=', ii, ax, bj, bx);
if(block != null) event = block(event);
callbacks.match(event);
bj = bj + 1;
}
// Update the ai with the current index point
ai = ii;
}
// Update pointer
ai = ai + 1;
// The last entry (if any) processed was a match. +ai+ and +bj+ point
// just past the last matching lines in their sequences.
while(ai < a_size || bj < b_size) {
// last A
if(ai == a_size && bj < b_size) {
if(callbacks.finished_a != null && !run_finished_a) {
ax = string ? seq1.substr(seq1.length - 1, 1) : seq1[seq1.length - 1];
bx = string ? seq2.substr(bj, 1) : seq2[bj];
event = new Difference.LCS.ContextChange('>', (a_size - 1), ax, bj, bx);
if(block != null) event = block(event);
callbacks.finished_a(event);
run_finished_a = true;
} else {
ax = string ? seq1.substr(ai, 1) : seq1[ai];
do {
bx = string ? seq2.substr(bj, 1) : seq2[bj];
event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_b(event);
bj = bj + 1;
} while(bj < b_size)
}
}
// last B?
if(bj == b_size && ai < a_size) {
if(callbacks.finished_b != null && !run_finished_b) {
ax = string ? seq1.substr(ai, 1) : seq1[ai];
bx = string ? seq2.substr(seq2.length - 1, 1) : seq2[seq2.length - 1];
event = new Difference.LCS.ContextChange('<', ai, ax, (b_size -1), bx);
if(block != null) event = block(event);
callbacks.finished_b(event);
run_finished_b = true;
} else {
bx = string ? seq2.substr(bj, 1) : seq2[bj];
do {
ax = string ? seq1.substr(ai, 1) : seq1[ai];
event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_a(event);
ai = ai + 1;
} while(bj < b_size)
}
}
if(ai < a_size) {
ax = string ? seq1.substr(ai, 1) : seq1[ai];
bx = string ? seq2.substr(bj, 1) : seq2[bj];
event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_a(event);
ai = ai + 1;
}
if(bj < b_size) {
ax = string ? seq1.substr(ai, 1) : seq1[ai];
bx = string ? seq2.substr(bj, 1) : seq2[bj];
event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_b(event);
bj = bj + 1;
}
}
}
// //traverse_balanced is an alternative to //traverse_sequences. It
// uses a different algorithm to iterate through the entries in the
// computed longest common subsequence. Instead of viewing the changes as
// insertions or deletions from one of the sequences, //traverse_balanced
// will report <em>changes</em> between the sequences. To represent a
//
// The arguments to //traverse_balanced are the two sequences to traverse
// and a callback object, like this:
//
// traverse_balanced(seq1, seq2, Diff::LCS::ContextDiffCallbacks.new)
//
// //sdiff is implemented with //traverse_balanced.
//
// == Callback Methods
// Optional callback methods are <em>emphasized</em>.
//
// callbacks//match:: Called when +a+ and +b+ are pointing
// to common elements in +A+ and +B+.
// callbacks//discard_a:: Called when +a+ is pointing to an
// element not in +B+.
// callbacks//discard_b:: Called when +b+ is pointing to an
// element not in +A+.
// <em>callbacks//change</em>:: Called when +a+ and +b+ are pointing
// to the same relative position, but
// <tt>A[a]</tt> and <tt>B[b]</tt> are
// not the same; a <em>change</em> has
// occurred.
//
// //traverse_balanced might be a bit slower than //traverse_sequences,
// noticeable only while processing huge amounts of data.
//
// The +sdiff+ function of this module is implemented as call to
// //traverse_balanced.
//
// == Algorithm
// a---+
// v
// A = a b c e h j l m n p
// B = b c d e f j k l m r s t
// ^
// b---+
//
// === Matches
// If there are two arrows (+a+ and +b+) pointing to elements of
// sequences +A+ and +B+, the arrows will initially point to the first
// elements of their respective sequences. //traverse_sequences will
// advance the arrows through the sequences one element at a time,
// calling a method on the user-specified callback object before each
// advance. It will advance the arrows in such a way that if there are
// elements <tt>A[ii]</tt> and <tt>B[jj]</tt> which are both equal and
// part of the longest common subsequence, there will be some moment
// during the execution of //traverse_sequences when arrow +a+ is pointing
// to <tt>A[ii]</tt> and arrow +b+ is pointing to <tt>B[jj]</tt>. When
// this happens, //traverse_sequences will call <tt>callbacks//match</tt>
// and then it will advance both arrows.
//
// === Discards
// Otherwise, one of the arrows is pointing to an element of its sequence
// that is not part of the longest common subsequence.
// //traverse_sequences will advance that arrow and will call
// <tt>callbacks//discard_a</tt> or <tt>callbacks//discard_b</tt>,
// depending on which arrow it advanced.
//
// === Changes
// If both +a+ and +b+ point to elements that are not part of the longest
// common subsequence, then //traverse_sequences will try to call
// <tt>callbacks//change</tt> and advance both arrows. If
// <tt>callbacks//change</tt> is not implemented, then
// <tt>callbacks//discard_a</tt> and <tt>callbacks//discard_b</tt> will be
// called in turn.
//
// The methods for <tt>callbacks//match</tt>, <tt>callbacks//discard_a</tt>,
// <tt>callbacks//discard_b</tt>, and <tt>callbacks//change</tt> are
// invoked with an event comprising the action ("=", "+", "-", or "!",
// respectively), the indices +ii+ and +jj+, and the elements
// <tt>A[ii]</tt> and <tt>B[jj]</tt>. Return values are discarded by
// //traverse_balanced.
//
// === Context
// Note that +ii+ and +jj+ may not be the same index position, even if
// +a+ and +b+ are considered to be pointing to matching or changed
// elements.
Difference.LCS.traverse_balanced = function(seq1, seq2, callbacks, block) {
// Ensure that we have at least a default callback object
callbacks = callbacks != null ? callbacks : new Difference.LCS.BalancedCallbacks();
// Fetch the matches from the __lcs algorithm
var matches = Difference.LCS.__lcs(seq1, seq2);
var a_size = seq1.length;
var b_size = seq2.length;
var ai = 0, bj = 0;
var mb = 0;
var ma = -1;
var string = seq1.constructor == String;
var ax = null, bx = null, event = null;
var execute = true;
// Process all the lines in the match vector.
while(true) {
// Find next match indices +ma+ and +mb+
while(execute) {
ma = ma + 1;
if(!(ma < matches.length && matches[ma] == null)) break;
// execute = !(ma < matches.length && matches[ma] == null);
}
if(ma >= matches.length) break; // end of matches
mb = matches[ma];
// Change seq2
while((ai < ma) || (bj < mb)) {
ax = string ? seq1.substr(ai, 1) : seq1[ai];
bx = string ? seq2.substr(bj, 1) : seq2[bj];
// Calculate result
var result = [(ai < ma), (bj < mb)];
if(result[0] && result[1]) {
if(callbacks.change != null) {
event = new Difference.LCS.ContextChange('!', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.change(event);
ai = ai + 1;
bj = bj + 1;
} else {
event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_a(event);
ai = ai + 1;
ax = string ? seq1.substr(ai, 1) : seq1[ai];
event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_b(event);
bj = bj + 1
}
} else if(result[0] && !result[1]) {
event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_a(event);
ai = ai + 1;
} else if(!result[0] && result[1]) {
event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_b(event);
bj = bj + 1;
}
}
// Match
ax = string ? seq1.substr(ai, 1) : seq1[ai];
bx = string ? seq2.substr(bj, 1) : seq2[bj];
event = new Difference.LCS.ContextChange('=', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.match(event);
ai = ai + 1;
bj = bj + 1;
}
while((ai < a_size) || (bj < b_size)) {
ax = string ? seq1.substr(ai, 1) : seq1[ai];
bx = string ? seq2.substr(bj, 1) : seq2[bj];
var result = [(ai < a_size), (bj < b_size)];
if(result[0] && result[1]) {
if(callbacks.change != null) {
event = new Difference.LCS.ContextChange('!', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.change(event);
ai = ai + 1;
bj = bj + 1;
} else {
event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_a(event);
ai = ai + 1;
ax = string ? seq1.substr(ai, 1) : seq1[ai];
event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_b(event);
bj = bj + 1;
}
} else if(result[0] && !result[1]) {
event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_a(event);
ai = ai + 1;
} else if(!result[0] && result[1]) {
event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
if(block != null) event = block(event);
callbacks.discard_b(event);
bj = bj + 1;
}
}
}
// Given two sequenced Enumerables, LCS returns an Array containing their
// longest common subsequences.
//
// lcs = Diff::LCS.LCS(seq1, seq2)
//
// This array whose contents is such that:
//
// lcs.each_with_index do |ee, ii|
// assert(ee.nil? || (seq1[ii] == seq2[ee]))
// end
//
// If a block is provided, the matching subsequences will be yielded from
// +seq1+ in turn and may be modified before they are placed into the
// returned Array of subsequences.
Difference.LCS.LCS = function(seq1, seq2, block) {
var matches = Difference.LCS.__lcs(seq1, seq2);
var ret = [];
for(var ii = 0; ii < matches.length; ii++) {
if(matches[ii] != null) {
if(block != null) {
ret.push(block(seq1[ii]));
} else {
ret.push(seq1[ii]);
}
}
}
// Return the result
return ret;
}
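// Illustrative usage (not part of the original file): LCS returns the longest
// common subsequence, drawn from seq1. For example:
//
//   Difference.LCS.LCS(['a', 'b', 'c', 'e', 'h', 'j'], ['b', 'c', 'd', 'e', 'f', 'j']);
//   // => [ 'b', 'c', 'e', 'j' ]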
var PATCH_MAP = {
patch:{ '+':'+', '-':'-', '!':'!', '=':'=' },
unpatch:{ '+':'-', '-':'+', '!':'!', '=':'=' }
}
// Given a patchset, convert the current version to the new
// version. If +direction+ is not specified (must be
// <tt>:patch</tt> or <tt>:unpatch</tt>), then discovery of the
// direction of the patch will be attempted.
Difference.LCS.patch = function(src, patchset, direction) {
var string = src.constructor == String;
// Start with an empty type of the source's class
var res = string ? '' : [];
// Normalize the patchset
var patchset = this.__normalize_patchset(patchset);
var direction = direction || Difference.LCS.__diff_direction(src, patchset);
direction = direction || "patch";
var ai = 0, bj = 0;
var el = null, op = null, np = null;
for(var i = 0; i < patchset.length; i++) {
var change = patchset[i];
// Both Change and ContextChange has the action
var action = PATCH_MAP[direction][change.action];
if(change instanceof ContextChange) {
if(direction == 'patch') {
el = change.new_element;
op = change.old_position;
np = change.new_position;
} else if(direction == 'unpatch') {
el = change.old_element;
op = change.new_position;
np = change.old_position;
}
if(action == '-') { //Remove details from the old string
while(ai < op) {
string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
ai = ai + 1;
bj = bj + 1;
}
ai = ai + 1;
} else if(action == '+') {
while(bj < np) {
string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
ai = ai + 1;
bj = bj + 1;
}
string ? res = res + el : res.push(el);
bj = bj + 1;
} else if(action == '=') {
// This only appears in sdiff output with the SDiff callback.
// Therefore, we only need to worry about dealing with a single
// element.
string ? res = res + el : res.push(el);
ai = ai + 1;
bj = bj + 1;
} else if(action == '!') {
while(ai < op) {
string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
ai = ai + 1;
bj = bj + 1;
}
bj = bj + 1;
ai = ai + 1;
string ? res = res + el : res.push(el);
}
} else if(change instanceof Change) {
if(action == '-') {
while(ai < change.position) {
string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
ai = ai + 1;
bj = bj + 1;
}
ai = ai + 1;
} else if(action == '+') {
while(bj < change.position) {
string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
ai = ai + 1;
bj = bj + 1;
}
bj = bj + 1;
string ? res = res + change.element : res.push(change.element);
}
}
}
while(ai < src.length) {
string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
ai = ai + 1;
bj = bj + 1;
}
return res;
}
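// Illustrative usage (sketch; the hand-built patchset below uses the plain
// array form accepted by __normalize_patchset):
//
//   var patchset = [ [ ['-', 1, 'b'], ['+', 1, 'x'] ] ];
//   Difference.LCS.patch(['a', 'b', 'c'], patchset)   // => ['a', 'x', 'c']
//   Difference.LCS.patch(['a', 'x', 'c'], patchset)   // => ['a', 'b', 'c'] (direction discovered as unpatch)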
// Examine the patchset and the source to see in which direction the
// patch should be applied.
//
// WARNING: By default, this examines the whole patch, so this could take
// some time. This also works better with Diff::LCS::ContextChange or
// Diff::LCS::Change as its source, as an array will cause the creation
// of one of the above.
Difference.LCS.__diff_direction = function(src, patchset, limit) {
var count = 0, left = 0, left_miss = 0, right = 0, right_miss = 0, element = null;
var string = src.constructor == String;
// Process all changes in the patchset
for(var i = 0; i < patchset.length; i++) {
var change = patchset[i];
count = count + 1;
if(change instanceof Change) {
// With a simplistic change, we can't tell the difference between
// the left and right on '!' actions, so we ignore those. On '='
// actions, if there's a miss, we miss both left and right.
element = string ? src.substr(change.position, 1) : src[change.position];
if(change.action == '-') {
element == change.element ? left = left + 1 : left_miss = left_miss + 1;
} else if(change.action == '+') {
element == change.element ? right = right + 1 : right_miss = right_miss + 1;
} else if(change.action == '=') {
if(element != change.element) {
left_miss = left_miss + 1;
right_miss = right_miss + 1;
}
}
} else if(change instanceof ContextChange) {
if(change.action == '-') {
element = string ? src.substr(change.old_position, 1) : src[change.old_position];
element == change.old_element ? left = left + 1 : left_miss = left_miss + 1;
} else if(change.action == '+') {
element = string ? src.substr(change.new_position, 1) : src[change.new_position];
element == change.new_element ? right = right + 1 : right_miss = right_miss + 1;
} else if(change.action == '=') {
var le = string ? src.substr(change.old_position, 1) : src[change.old_position];
var re = string ? src.substr(change.new_position, 1) : src[change.new_position];
if(le != change.old_element) left_miss = left_miss + 1;
if(re != change.new_element) right_miss = right_miss + 1;
} else if(change.action == '!') {
element = string ? src.substr(change.old_position, 1) : src[change.old_position];
if(element == change.old_element) {
left = left + 1;
} else {
left_miss = left_miss + 1;
right_miss = right_miss + 1;
}
}
}
if(limit != null && count > limit) break;
};
var no_left = (left == 0) && (left_miss >= 0);
var no_right = (right == 0) && (right_miss >= 0);
var result = [no_left, no_right];
if(!no_left && no_right) {
return "patch";
} else if(no_left && !no_right) {
return "unpatch";
} else {
throw "The provided patchset does not appear to apply to the provided value as either source or destination value."
}
}
// Normalize the patchset. A patchset is always a sequence of changes, but
// how those changes are represented may vary, depending on how they were
// generated. In all cases we support, we also support the array
// representation of the changes. The formats are:
//
// [ // patchset <- Diff::LCS.diff(a, b)
// [ // one or more hunks
// Diff::LCS::Change // one or more changes
// ] ]
//
// [ // patchset, equivalent to the above
// [ // one or more hunks
// [ action, line, value ] // one or more changes
// ] ]
//
// [ // patchset <- Diff::LCS.diff(a, b, Diff::LCS::ContextDiffCallbacks)
// // OR <- Diff::LCS.sdiff(a, b, Diff::LCS::ContextDiffCallbacks)
// [ // one or more hunks
// Diff::LCS::ContextChange // one or more changes
// ] ]
//
// [ // patchset, equivalent to the above
// [ // one or more hunks
// [ action, [ old line, old value ], [ new line, new value ] ]
// // one or more changes
// ] ]
//
// [ // patchset <- Diff::LCS.sdiff(a, b)
// // OR <- Diff::LCS.diff(a, b, Diff::LCS::SDiffCallbacks)
// Diff::LCS::ContextChange // one or more changes
// ]
//
// [ // patchset, equivalent to the above
// [ action, [ old line, old value ], [ new line, new value ] ]
// // one or more changes
// ]
//
// The result of this will be either of the following.
//
// [ // patchset
// Diff::LCS::ContextChange // one or more changes
// ]
//
// [ // patchset
// Diff::LCS::Change // one or more changes
// ]
//
// If either of the above is provided, it will be returned as such.
//
Difference.LCS.__normalize_patchset = function(patchset) {
return flatten(patchset.map(function(hunk) {
if(hunk instanceof ContextChange || hunk instanceof Change) {
return hunk;
} else if(Array.isArray(hunk)) {
if(!Array.isArray(hunk[0]) && Array.isArray(hunk[1]) && Array.isArray(hunk[2])) {
return ContextChange.from_a(hunk);
} else {
return hunk.map(function(change) {
if(change instanceof ContextChange || change instanceof Change) {
return change;
} else if(Array.isArray(change)) {
// change[1] will ONLY be an array in a ContextChange#to_a call.
// In Change#to_a, it represents the line (singular).
if(Array.isArray(change[1])) {
return ContextChange.from_a(change);
} else {
return Change.from_a(change);
}
}
});
}
} else {
throw "Cannot normalize the hunk: " + util.inspect(hunk);
}
}));
}
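// Illustrative sketch: a nested array patchset is flattened into Change objects, e.g.
//
//   Difference.LCS.__normalize_patchset([ [ ['+', 2, 'z'], ['-', 4, 'q'] ] ])
//   // => [ Change('+', 2, 'z'), Change('-', 4, 'q') ]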
// Flatten an array by one level (used to flatten normalized hunks)
var flatten = function(array) {
return array.reduce(function(a,b) {
return a.concat(b);
}, []);
}
// Compute the longest common subsequence between the arrays a and b the result
// being an array whose content is such that they
// count = 0
// result.forEach(function(e) {
// if(e) a[count] == b[e];
// count++;
// })
Difference.LCS.__lcs = function(a, b) {
var a_start = 0;
var b_start = 0;
var a_finish = a.length - 1;
var b_finish = b.length - 1;
var vector = [];
// Remove common elements at the beginning
while((a_start <= a_finish) && (b_start <= b_finish) && (a[a_start] == b[b_start])) {
vector[a_start] = b_start;
a_start = a_start + 1;
b_start = b_start + 1;
}
// Remove common elements at the end
while((a_start <= a_finish) && (b_start <= b_finish) && (a[a_finish] == b[b_finish])) {
vector[a_finish] = b_finish;
a_finish = a_finish - 1;
b_finish = b_finish - 1;
}
// Now compute the equivalent classes of positions of elements
var b_matches = Difference.LCS.__position_hash(b, b_start, b_finish);
// Define threshold and links
var thresh = [];
var links = [];
for(var ii = a_start; ii <= a_finish; ii++) {
var ai = Array.isArray(a) ? a[ii] : a.charAt(ii);
var bm = b_matches[ai];
bm = bm ? bm : [];
var kk = null;
bm.reverse().forEach(function(jj) {
if(kk != null && (thresh[kk] > jj) && (thresh[kk - 1] < jj)) {
thresh[kk] = jj;
} else {
kk = Difference.LCS.__replace_next_larger(thresh, jj, kk);
}
// Add link
if(kk != null) links[kk] = [(kk > 0) ? links[kk - 1] : null, ii, jj];
});
}
// Build the vector
if(thresh.length > 0) {
var link = links[thresh.length - 1];
while(link != null) {
vector[link[1]] = link[2];
link = link[0];
}
}
// Return the vector of the longest common subsequence
return vector;
}
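// Illustrative sketch: __lcs(['a', 'b', 'c', 'd'], ['a', 'x', 'c', 'd']) returns a
// sparse vector with vector[0] == 0, vector[2] == 2 and vector[3] == 3 (vector[1]
// is unset), i.e. a[ii] == b[vector[ii]] wherever vector[ii] is defined.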
// Find the place at which +value+ would normally be inserted into the
// Enumerable. If that place is already occupied by +value+, do nothing
// and return +nil+. If the place does not exist (i.e., it is off the end
// of the Enumerable), add it to the end. Otherwise, replace the element
// at that point with +value+. It is assumed that the Enumerable's values
// are numeric.
//
// This operation preserves the sort order.
Difference.LCS.__replace_next_larger = function(enumerable, value, last_index) {
// Is it off the end
if(enumerable.length == 0 || (value > enumerable[enumerable.length - 1])) {
enumerable.push(value);
return enumerable.length - 1;
}
// Binary search for the insertion point
var last_index = last_index || enumerable.length;
var first_index = 0;
while(first_index <= last_index) {
var ii = (first_index + last_index) >> 1;
var found = enumerable[ii];
if(value == found) {
return null;
} else if(value > found) {
first_index = ii + 1;
} else {
last_index = ii - 1;
}
}
// The insertion point is in first_index; overwrite the next larger
// value.
enumerable[first_index] = value;
return first_index;
}
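// Illustrative sketch:
//
//   var thresh = [1, 3, 5];
//   Difference.LCS.__replace_next_larger(thresh, 4)   // => 2, thresh becomes [1, 3, 4]
//   Difference.LCS.__replace_next_larger(thresh, 9)   // => 3, thresh becomes [1, 3, 4, 9]
//   Difference.LCS.__replace_next_larger(thresh, 3)   // => null, thresh is unchanged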
Difference.LCS.__position_hash = function(enumerable, interval_start, interval_end) {
interval_start = (interval_start != null) ? interval_start : 0;
interval_end = (interval_end != null) ? interval_end : -1;
var hash = {}
for(var i = interval_start; i <= interval_end; i++) {
var kk = Array.isArray(enumerable) ? enumerable[i] : enumerable.charAt(i);
hash[kk] = Array.isArray(hash[kk]) ? hash[kk] : [];
hash[kk].push(i);
}
return hash;
}
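// Illustrative sketch:
//
//   Difference.LCS.__position_hash('abca', 0, 3)
//   // => { a: [0, 3], b: [1], c: [2] }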

291
node_modules/git/lib/diff/hunk.js generated vendored Normal file
View File

@ -0,0 +1,291 @@
var util = require('util'),
Block = require('./block').Block;
// A Hunk is a group of Blocks which overlap because of the context
// surrounding each block. (So if we're not using context, every hunk will
// contain one block.) Used in the diff program (bin/diff).
var Hunk = exports.Hunk = function(data_old, data_new, piece, context, file_length_difference) {
// Internal variables
var _flag_context = null;
var self = this;
// At first, a hunk will have just one Block in it
this.blocks = [new Block(piece)];
this.data_old = data_old;
this.data_new = data_new;
var before = file_length_difference, after = file_length_difference;
after = after + this.blocks[0].diff_size;
this.file_length_difference = after; // The caller must get this manually
// Other parameters
var a1 = null, a2 = null;
var b1 = null, b2 = null;
// Save the start & end of each array. If the array doesn't exist
// (e.g., we're only adding items in this block), then figure out the
// line number based on the line number of the other file and the
// current difference in file lengths.
if(this.blocks[0].remove.length > 0) {
a1 = this.blocks[0].remove[0].position;
a2 = this.blocks[0].remove[this.blocks[0].remove.length - 1].position;
}
if(this.blocks[0].insert.length > 0) {
b1 = this.blocks[0].insert[0].position;
b2 = this.blocks[0].insert[this.blocks[0].insert.length - 1].position;
}
this.start_old = (a1 != null) ? a1 : (b1 - before);
this.start_new = (b1 != null) ? b1 : (a1 + before);
this.end_old = (a2 != null) ? a2 : (b2 - after);
this.end_new = (b2 != null) ? b2 : (a2 + after);
// Change the "start" and "end" fields to note that context should be added
// to this hunk
Object.defineProperty(this, "flag_context", { get: function() { return _flag_context; }, set: function(context) {
if(context == null || context == 0) return null;
var add_start = (context > self.start_old) ? self.start_old : context;
var add_end = null;
self.start_old = self.start_old - add_start;
self.start_new = self.start_new - add_start;
if((self.end_old + context) > self.data_old.length) {
add_end = self.data_old.length - self.end_old;
} else {
add_end = context;
}
self.end_old = self.end_old + add_end;
self.end_new = self.end_new + add_end;
_flag_context = context;
}, enumerable: true});
// Set the flag_context
this.flag_context = context;
}
Hunk.prototype.unshift = function(hunk) {
this.start_old = hunk.start_old;
this.start_new = hunk.start_new;
this.blocks = hunk.blocks.concat(this.blocks);
}
// Is there an overlap between hunk arg0 and old hunk arg1? Note: if end
// of old hunk is one less than beginning of second, they overlap
Hunk.prototype.overlaps = function(hunk) {
if(hunk == null) return null;
var a = (this.start_old - hunk.end_old) <= 1;
var b = (this.start_new - hunk.end_new) <= 1;
return (a || b);
}
Hunk.prototype.diff = function(format) {
if(format == "old") {
return old_diff(this);
} else if(format == 'unified') {
return unified_diff(this);
} else if(format == 'context') {
return context_diff(this);
} else if(format == 'ed') {
return this;
} else if(format == 'reverse_ed' || format == 'ed_finish') {
return ed_diff(this, format);
} else {
throw "unknown diff format " + format;
}
}
Hunk.prototype.each_old = function(block) {
var entries = this.data_old.slice(this.start_old, this.end_old);
entries.forEach(function(e) {
block(e);
});
}
// Note that an old diff can't have any context. Therefore, we know that
// there's only one block in the hunk.
var old_diff = function(hunk) {
if(hunk.blocks.length > 1) util.puts("expecting only one block in an old diff hunk!");
// Set up operation actions
var opt_act = {'+':'a', '-':'d', '!':'c'};
var block = hunk.blocks[0];
// Calculate item number range. Old diff range is just like a context
// diff range, except the ranges are on one line with the action between
// them.
var s = "" + context_rang("old") + opt_act[block.op] + context_rang("new") + "\n";
// If removing anything, just print out all the remove lines in the hunk
// which is just all the remove lines in the block.
if(block.remove.length > 0) {
hunk.data_old.slice(hunk.start_old, hunk.end_old).forEach(function(e) {
s = s + "< " + e + "\n";
});
}
if(block.insert.length > 0) {
hunk.data_new.slice(hunk.start_new, hunk.end_new).forEach(function(e) {
s = s + "> " + e + "\n;"
});
}
// Return the diff string
return s;
}
var unified_diff = function(hunk) {
// Calculate item number range.
var s = "@@ -" + unified_range(hunk, 'old') + " +" + unified_range(hunk, 'new') + " @@\n";
// Outlist starts containing the hunk of the old file. Removing an item
// just means putting a '-' in front of it. Inserting an item requires
// getting it from the new file and splicing it in. We splice in
// +num_added+ items. Remove blocks use +num_added+ because splicing
// changed the length of outlist.
//
// We remove +num_removed+ items. Insert blocks use +num_removed+
// because their item numbers -- corresponding to positions in the NEW
// file -- don't take removed items into account.
var lo = hunk.start_old;
var hi = hunk.end_old;
var num_added = 0;
var num_removed = 0;
// Create list of stripped entries
var outlist = hunk.data_old.slice(lo, hi + 1).map(function(e) { return e.replace(/^/g, ' '); });
// Process all the blocks
hunk.blocks.forEach(function(block) {
block.remove.forEach(function(item) {
var op = item.action.toString(); // -
var offset = item.position - lo + num_added;
outlist[offset] = outlist[offset].replace(/^ /g, op.toString());
num_removed = num_removed + 1;
})
block.insert.forEach(function(item) {
var op = item.action.toString(); // +
var offset = item.position - hunk.start_new + num_removed;
outlist.splice(offset, 0, ("" + op + hunk.data_new[item.position]));
num_added = num_added + 1;
});
});
// Return the list
return s + outlist.join('\n');
}
var context_diff = function(hunk) {
var s = '***************\n';
s = s + '*** ' + context_range(hunk, 'old') + ' ****\n';
// Retrieve the context
var r = context_range(hunk, 'new');
var outlist = null;
// Print out file 1 part for each block in context diff format if there
// are any blocks that remove items
var lo = hunk.start_old;
var hi = hunk.end_old;
var removes = hunk.blocks.filter(function(e) { return !(e.remove.length == 0); });
if(removes.length > 0) {
outlist = hunk.data_old.slice(lo, hi).map(function(e) { return e.replace(/^/g, ' '); });
removes.forEach(function(block) {
block.remove.forEach(function(item) {
outlist[item.position - lo] = outlist[item.position - lo].replace(/^ /g, block.op); // - or !
});
});
// Add to diff string
s = s + outlist.join('\n');
}
s = s + '\n--- ' + r + ' ----\n';
lo = hunk.start_new;
hi = hunk.end_new;
var inserts = hunk.blocks.filter(function(e) { return !(e.insert.length == 0); });
if(inserts.length > 0) {
outlist = hunk.data_new.slice(lo, hi).map(function(e) { return e.replace(/^/g, ' '); });
inserts.forEach(function(block) {
block.insert.forEach(function(item) {
outlist[item.position - lo] = outlist[item.position - lo].replace(/^ /g, block.op); // + or !
});
});
// Add to diff string
s = s + outlist.join('\n');
}
// Return the diff string
return s;
}
var ed_diff = function(hunk, format) {
var op_act = {'+':'a', '-':'d', '!':'c'};
if(hunk.blocks.length > 1) util.puts("expecting only one block in an ed diff hunk!");
var s = null;
if(format == 'reverse_ed') {
s = "" + op_act[hunk.blocks[0].op] + context_range(hunk, 'old') + '\n';
} else {
s = "" + context_range(hunk, 'old').replace(/,/g, ' ') + op_act[hunk.blocks[0].op] + '\n';
}
}
if(hunk.blocks[0].insert.length > 0) {
hunk.data_new.slice(hunk.start_new, hunk.end_new).forEach(function(e) {
s = s + '' + e + '\n';
});
// Add final marker
s = s + '.\n';
}
// Return diff string
return s;
}
// Generate a range of item numbers to print. Only print 1 number if the
// range has only one item in it. Otherwise, it's 'start,end'
var context_range = function(hunk, mode) {
var s = null, e = null;
if(mode == 'old') {
s = (hunk.start_old + 1);
e = (hunk.end_old + 1);
} else if(mode == 'new') {
s = (hunk.start_new + 1);
e = (hunk.end_new + 1);
}
return (s < e) ? ("" + s + "," + e) : ("" + e);
}
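// e.g. a hunk covering old lines 3 through 5 (start_old 2, end_old 4, zero-based)
// yields "3,5"; a single-line hunk yields just "5".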
// Generate a range of item numbers to print for unified diff. Print
// number where block starts, followed by number of lines in the block
// (don't print number of lines if it's 1)
var unified_range = function(hunk, mode) {
var s = null, e = null;
if(mode == 'old') {
s = (hunk.start_old + 1);
e = (hunk.end_old + 1);
} else if(mode == 'new') {
s = (hunk.start_new + 1);
e = (hunk.end_new + 1);
}
var length = e - s + 1;
var first = (length < 2) ? e : s; // an empty range reports the line just before the block
return (length == 1) ? ("" + first) : (first + "," + length);
}
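// e.g. a three-line block starting at old line 3 (start_old 2, end_old 4, zero-based)
// yields "3,3"; a single-line block yields just the line number.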

21
node_modules/git/lib/git/actor.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
var util = require('util');
Actor = exports.Actor = function(name, email) {
var _name = name, _email = email;
// Control access to internal variables
Object.defineProperty(this, "name", { get: function() { return _name; }, set: function(value) { _name = value; }, enumerable: true});
Object.defineProperty(this, "email", { get: function() { return _email; }, set: function(value) { _email = value; }, enumerable: true});
}
Actor.from_string = function(string) {
if(string.match(/<.+>/)) {
var results = string.match(/(.*) <(.+?)>/);
return new Actor(results[1], results[2]);
} else {
return new Actor(string, null);
}
}
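// Illustrative usage (sketch; values assumed):
//
//   Actor.from_string("Jane Doe <jane@example.com>")   // => name "Jane Doe", email "jane@example.com"
//   Actor.from_string("Jane Doe")                      // => name "Jane Doe", email null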
Actor.prototype.toString = function() {
return this.name;
}

253
node_modules/git/lib/git/binary_parser.js generated vendored Normal file
View File

@ -0,0 +1,253 @@
//+ Jonas Raoni Soares Silva
//@ http://jsfromhell.com/classes/binary-parser [v1.0]
var chr = String.fromCharCode;
var p = exports.BinaryParser = function( bigEndian, allowExceptions ){
this.bigEndian = bigEndian;
this.allowExceptions = allowExceptions;
};
var Buffer = exports.BinaryParser.Buffer = function( bigEndian, buffer ){
this.bigEndian = bigEndian || 0;
this.buffer = [];
this.setBuffer( buffer );
};
Buffer.prototype.setBuffer = function( data ){
if( data ){
for( var l, i = l = data.length, b = this.buffer = new Array( l ); i; b[l - i] = data.charCodeAt( --i ) );
this.bigEndian && b.reverse();
}
};
Buffer.prototype.hasNeededBits = function( neededBits ){
return this.buffer.length >= -( -neededBits >> 3 );
};
Buffer.prototype.checkBuffer = function( neededBits ){
if( !this.hasNeededBits( neededBits ) )
throw new Error( "checkBuffer::missing bytes" );
};
Buffer.prototype.readBits = function( start, length ){
//shl fix: Henri Torgemane ~1996 (compressed by Jonas Raoni)
function shl( a, b ){
for( ; b--; a = ( ( a %= 0x7fffffff + 1 ) & 0x40000000 ) == 0x40000000 ? a * 2 : ( a - 0x40000000 ) * 2 + 0x7fffffff + 1 );
return a;
}
if( start < 0 || length <= 0 )
return 0;
this.checkBuffer( start + length );
for( var offsetLeft, offsetRight = start % 8, curByte = this.buffer.length - ( start >> 3 ) - 1, lastByte = this.buffer.length + ( -( start + length ) >> 3 ), diff = curByte - lastByte, sum = ( ( this.buffer[ curByte ] >> offsetRight ) & ( ( 1 << ( diff ? 8 - offsetRight : length ) ) - 1 ) ) + ( diff && ( offsetLeft = ( start + length ) % 8 ) ? ( this.buffer[ lastByte++ ] & ( ( 1 << offsetLeft ) - 1 ) ) << ( diff-- << 3 ) - offsetRight : 0 ); diff; sum += shl( this.buffer[ lastByte++ ], ( diff-- << 3 ) - offsetRight ) );
return sum;
};
p.warn = function( msg ){
if( this.allowExceptions )
throw new Error( msg );
return 1;
};
p.decodeFloat = function( data, precisionBits, exponentBits ){
var b = new this.Buffer( this.bigEndian, data );
b.checkBuffer( precisionBits + exponentBits + 1 );
var bias = Math.pow( 2, exponentBits - 1 ) - 1, signal = b.readBits( precisionBits + exponentBits, 1 ), exponent = b.readBits( precisionBits, exponentBits ), significand = 0,
divisor = 2, curByte = b.buffer.length + ( -precisionBits >> 3 ) - 1;
do{
for( var byteValue = b.buffer[ ++curByte ], startBit = precisionBits % 8 || 8, mask = 1 << startBit; mask >>= 1; ( byteValue & mask ) && ( significand += 1 / divisor ), divisor *= 2 );
}while( precisionBits -= startBit );
return exponent == ( bias << 1 ) + 1 ? significand ? NaN : signal ? -Infinity : +Infinity : ( 1 + signal * -2 ) * ( exponent || significand ? !exponent ? Math.pow( 2, -bias + 1 ) * significand : Math.pow( 2, exponent - bias ) * ( 1 + significand ) : 0 );
};
p.decodeInt = function( data, bits, signed, forceBigEndian ){
var b = new this.Buffer( this.bigEndian||forceBigEndian, data ), x = b.readBits( 0, bits ), max = Math.pow( 2, bits );
return signed && x >= max / 2 ? x - max : x;
};
p.encodeFloat = function( data, precisionBits, exponentBits ){
var bias = Math.pow( 2, exponentBits - 1 ) - 1, minExp = -bias + 1, maxExp = bias, minUnnormExp = minExp - precisionBits,
status = isNaN( n = parseFloat( data ) ) || n == -Infinity || n == +Infinity ? n : 0,
exp = 0, len = 2 * bias + 1 + precisionBits + 3, bin = new Array( len ),
signal = ( n = status !== 0 ? 0 : n ) < 0, n = Math.abs( n ), intPart = Math.floor( n ), floatPart = n - intPart,
i, lastBit, rounded, j, result;
for( i = len; i; bin[--i] = 0 );
for( i = bias + 2; intPart && i; bin[--i] = intPart % 2, intPart = Math.floor( intPart / 2 ) );
for( i = bias + 1; floatPart > 0 && i; ( bin[++i] = ( ( floatPart *= 2 ) >= 1 ) - 0 ) && --floatPart );
for( i = -1; ++i < len && !bin[i]; );
if( bin[( lastBit = precisionBits - 1 + ( i = ( exp = bias + 1 - i ) >= minExp && exp <= maxExp ? i + 1 : bias + 1 - ( exp = minExp - 1 ) ) ) + 1] ){
if( !( rounded = bin[lastBit] ) ){
for( j = lastBit + 2; !rounded && j < len; rounded = bin[j++] );
}
for( j = lastBit + 1; rounded && --j >= 0; ( bin[j] = !bin[j] - 0 ) && ( rounded = 0 ) );
}
for( i = i - 2 < 0 ? -1 : i - 3; ++i < len && !bin[i]; );
if( ( exp = bias + 1 - i ) >= minExp && exp <= maxExp )
++i;
else if( exp < minExp ){
exp != bias + 1 - len && exp < minUnnormExp && this.warn( "encodeFloat::float underflow" );
i = bias + 1 - ( exp = minExp - 1 );
}
if( intPart || status !== 0 ){
this.warn( intPart ? "encodeFloat::float overflow" : "encodeFloat::" + status );
exp = maxExp + 1;
i = bias + 2;
if( status == -Infinity )
signal = 1;
else if( isNaN( status ) )
bin[i] = 1;
}
for( n = Math.abs( exp + bias ), j = exponentBits + 1, result = ""; --j; result = ( n % 2 ) + result, n = n >>= 1 );
for( n = 0, j = 0, i = ( result = ( signal ? "1" : "0" ) + result + bin.slice( i, i + precisionBits ).join( "" ) ).length, r = []; i; j = ( j + 1 ) % 8 ){
n += ( 1 << j ) * result.charAt( --i );
if( j == 7 ){
r[r.length] = String.fromCharCode( n );
n = 0;
}
}
r[r.length] = n ? String.fromCharCode( n ) : "";
return ( this.bigEndian ? r.reverse() : r ).join( "" );
};
p.encodeInt = function( data, bits, signed, forceBigEndian ){
var max = Math.pow( 2, bits );
( data >= max || data < -( max / 2 ) ) && this.warn( "encodeInt::overflow" ) && ( data = 0 );
data < 0 && ( data += max );
for( var r = []; data; r[r.length] = String.fromCharCode( data % 256 ), data = Math.floor( data / 256 ) );
for( bits = -( -bits >> 3 ) - r.length; bits--; r[r.length] = "\0" );
return ( (this.bigEndian||forceBigEndian) ? r.reverse() : r ).join( "" );
};
p.toSmall = function( data ){ return this.decodeInt( data, 8, true ); };
p.fromSmall = function( data ){ return this.encodeInt( data, 8, true ); };
p.toByte = function( data ){ return this.decodeInt( data, 8, false ); };
p.fromByte = function( data ){ return this.encodeInt( data, 8, false ); };
p.toShort = function( data ){ return this.decodeInt( data, 16, true ); };
p.fromShort = function( data ){ return this.encodeInt( data, 16, true ); };
p.toWord = function( data ){ return this.decodeInt( data, 16, false ); };
p.fromWord = function( data ){ return this.encodeInt( data, 16, false ); };
p.toInt = function( data ){ return this.decodeInt( data, 32, true ); };
p.fromInt = function( data ){ return this.encodeInt( data, 32, true ); };
p.toLong = function( data ){ return this.decodeInt( data, 64, true ); };
p.fromLong = function( data ){ return this.encodeInt( data, 64, true ); };
p.toDWord = function( data ){ return this.decodeInt( data, 32, false ); };
p.fromDWord = function( data ){ return this.encodeInt( data, 32, false ); };
p.toQWord = function( data ){ return this.decodeInt( data, 64, true ); };
p.fromQWord = function( data ){ return this.encodeInt( data, 64, true ); };
p.toFloat = function( data ){ return this.decodeFloat( data, 23, 8 ); };
p.fromFloat = function( data ){ return this.encodeFloat( data, 23, 8 ); };
p.toDouble = function( data ){ return this.decodeFloat( data, 52, 11 ); };
p.fromDouble = function( data ){ return this.encodeFloat( data, 52, 11 ); };
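// Illustrative round-trip (sketch; require path assumed, the helpers are called
// statically on the exported BinaryParser and operate on binary strings):
//
//   var BinaryParser = require('./binary_parser').BinaryParser;
//   BinaryParser.toInt(BinaryParser.fromInt(-42))    // => -42
//   BinaryParser.hex(BinaryParser.fromShort(258))    // => "0201" (little-endian)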
// Factor out the encode so it can be shared by add_header and push_int32
p.encode_int32 = function(number) {
var a, b, c, d, unsigned;
unsigned = (number < 0) ? (number + 0x100000000) : number;
a = Math.floor(unsigned / 0xffffff);
unsigned &= 0xffffff;
b = Math.floor(unsigned / 0xffff);
unsigned &= 0xffff;
c = Math.floor(unsigned / 0xff);
unsigned &= 0xff;
d = Math.floor(unsigned);
return chr(a) + chr(b) + chr(c) + chr(d);
};
p.encode_int64 = function(number) {
var a, b, c, d, e, f, g, h, unsigned;
unsigned = (number < 0) ? (number + 0x10000000000000000) : number;
a = Math.floor(unsigned / 0xffffffffffffff);
unsigned &= 0xffffffffffffff;
b = Math.floor(unsigned / 0xffffffffffff);
unsigned &= 0xffffffffffff;
c = Math.floor(unsigned / 0xffffffffff);
unsigned &= 0xffffffffff;
d = Math.floor(unsigned / 0xffffffff);
unsigned &= 0xffffffff;
e = Math.floor(unsigned / 0xffffff);
unsigned &= 0xffffff;
f = Math.floor(unsigned / 0xffff);
unsigned &= 0xffff;
g = Math.floor(unsigned / 0xff);
unsigned &= 0xff;
h = Math.floor(unsigned);
return chr(a) + chr(b) + chr(c) + chr(d) + chr(e) + chr(f) + chr(g) + chr(h);
};
/**
UTF8 methods
**/
// Take a raw binary string and return a utf8 string
p.decode_utf8 = function(a) {
var string = "";
var i = 0;
var c = 0, c1 = 0, c2 = 0, c3 = 0;
while ( i < a.length ) {
c = a.charCodeAt(i);
if (c < 128) {
string += String.fromCharCode(c);
i++;
} else if((c > 191) && (c < 224)) {
c2 = a.charCodeAt(i+1);
string += String.fromCharCode(((c & 31) << 6) | (c2 & 63));
i += 2;
} else {
c2 = a.charCodeAt(i+1);
c3 = a.charCodeAt(i+2);
string += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63));
i += 3;
}
}
return string;
};
// Encode a cstring correctly
p.encode_cstring = function(s) {
return unescape(encodeURIComponent(s)) + p.fromByte(0);
};
// Take a utf8 string and return a binary string
p.encode_utf8 = function(s) {
var a="";
for (var n=0; n< s.length; n++) {
var c=s.charCodeAt(n);
if (c<128) {
a += String.fromCharCode(c);
} else if ((c>127)&&(c<2048)) {
a += String.fromCharCode( (c>>6) | 192) ;
a += String.fromCharCode( (c&63) | 128);
} else {
a += String.fromCharCode( (c>>12) | 224);
a += String.fromCharCode( ((c>>6) & 63) | 128);
a += String.fromCharCode( (c&63) | 128);
}
}
return a;
};
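// Illustrative round-trip (sketch):
//
//   p.decode_utf8(p.encode_utf8("héllo"))   // => "héllo"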
p.pprint = function(s) {
var util = require('util');
for (var i=0; i<s.length; i++) {
if (s.charCodeAt(i)<32) {util.puts(s.charCodeAt(i)+' : ');}
else {util.puts(s.charCodeAt(i)+' : '+ s.charAt(i));}
}
};
p.hprint = function(s) {
var util = require('util');
for (var i=0; i<s.length; i++) {
if (s.charCodeAt(i)<32) {util.puts(s.charCodeAt(i)+' : ');}
else {util.puts(s.charCodeAt(i).toString(16)+' : '+ s.charAt(i));}
}
};
p.hex = function(s) {
var util = require('util');
var string = ''
for (var i=0; i<s.length; i++) {
var c = s.charCodeAt(i).toString(16);
c = c.length == 1 ? "0" + c : c;
string = string + c;
}
return string;
};

64
node_modules/git/lib/git/blame.js generated vendored Normal file
View File

@ -0,0 +1,64 @@
var BlameLine = require('./blame_line').BlameLine;
var Blame = exports.Blame = function(repo, file, commit, callback) {
var _repo = repo, _file = file, _commit = commit, _lines = [];
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 2);
callback = args.pop();
var _commit = args.length ? args.shift() : null;
// Control access to internal variables
Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: false});
Object.defineProperty(this, "file", { get: function() { return _file; }, set: function(value) { _file = value; }, enumerable: true});
Object.defineProperty(this, "commit", { get: function() { return _commit; }, set: function(value) { _commit = value; }, enumerable: true});
Object.defineProperty(this, "lines", { get: function() { return _lines; }, set: function(value) { _lines = value; }, enumerable: true});
// Load the blame object
load_blame(this, _repo, _file, _commit, callback);
}
// Load and parse the blame
var load_blame = function(blame, repo, file, commit, callback) {
repo.git.blame({p:true}, commit, '--', file, function(err, blame_output) {
process_raw_blame(blame, blame_output, repo, callback)
});
}
// Parse the output and set all parameters on the current blame object
var process_raw_blame = function(blame, output, repo, callback) {
// Cleanup the output (removing whitespace at the start and end)
output = output ? output.trim() : '';
// Set up variables
var lines = [], final = [];
var info = {}, commits = {};
var output_lines = output.split("\n");
for(var i = 0; i < output_lines.length; i++) {
var line = output_lines[i];
var match = line.match(/^(\w{40}) (\d+) (\d+)/);
// If we have a tab character at the start skip it
if(line.substr(0, 1) == "\t") {
lines.push(line.substring(1, line.length));
} else if(match) {
if(!commits[match[1]]) {
repo.commit(match[1], function(err, commit) {
commits[match[1]] = commit;
});
}
// Add the info for this line
info[parseInt(match[3])] = [commits[match[1]], parseInt(match[2])];
}
}
// Let's sort the content
var sorted_keys = Object.keys(info).sort(function(a, b) { return parseInt(a) - parseInt(b); });
sorted_keys.forEach(function(key) {
var info_object = info[key];
final.push(new BlameLine(key, info_object[1], info_object[0], lines[key - 1]));
});
// Assign the blame lines to the blame object and return
blame.lines = final;
callback(null, blame);
}

10
node_modules/git/lib/git/blame_line.js generated vendored Normal file
View File

@ -0,0 +1,10 @@
var BlameLine = exports.BlameLine = function(lineno, oldlineno, commit, line) {
var _lineno = lineno, _oldlineno = oldlineno, _commit = commit, _line = line;
// Control access to internal variables
Object.defineProperty(this, "lineno", { get: function() { return _lineno; }, set: function(value) { _lineno = value; }, enumerable: true});
Object.defineProperty(this, "oldlineno", { get: function() { return _oldlineno; }, set: function(value) { _oldlineno = value; }, enumerable: true});
Object.defineProperty(this, "commit", { get: function() { return _commit; }, set: function(value) { _commit = value; }, enumerable: true});
Object.defineProperty(this, "line", { get: function() { return _line; }, set: function(value) { _line = value; }, enumerable: true});
}

145
node_modules/git/lib/git/blob.js generated vendored Normal file
View File

@ -0,0 +1,145 @@
var mime = require('mime'),
Actor = require('./actor').Actor;
var Blob = exports.Blob = function(repo, id, mode, name) {
var _repo = repo, _id = id, _mode = mode, _name = name, _content = null, _data = null, _size = 0;
Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: true});
Object.defineProperty(this, "id", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true});
Object.defineProperty(this, "mode", { get: function() { return _mode; }, set: function(value) { _mode = value; }, enumerable: true});
Object.defineProperty(this, "name", { get: function() { return _name; }, set: function(value) { _name = value; }, enumerable: true});
Object.defineProperty(this, "content", { get: function() { return _content; }, set: function(value) { _content = value; }, enumerable: true});
// Data of the blob
Object.defineProperty(this, "data", { get: function() {
_data = lazy_reader(_repo, _id, 'p', _data);
return _data;
}, enumerable: false});
// Size of the blob
Object.defineProperty(this, "size", { get: function() {
_size = lazy_reader(_repo, _id, 's', _size);
return _size;
}, enumerable: false});
// Mime type of the blob
Object.defineProperty(this, "mime_type", { get: function() {
return mime.lookup(_name || '', 'text/plain')
}, enumerable: false});
// Return the base name
Object.defineProperty(this, "basename", { get: function() {
if(_name) {
var parts = _name.split("/");
return parts[parts.length - 1];
} else {
return null;
}
}, enumerable: false});
}
var lazy_reader = function(repo, id, type, variable) {
if(variable) return variable;
// Control the flow
var done = false;
var value = null;
// Fetch the content
repo.git.cat_file(type, id, function(err, content) {
if(err) return done = true;
value = content;
done = true;
})
while(!done) {};
return value;
}
// The blame information for the given file at the given commit
//
// Returns an array of [commit, lines] pairs
Blob.blame = function(repo, commit, file, callback) {
var Commit = require('./commit').Commit;
repo.git.blame({'p':true}, commit, '--', file, function(err, data) {
if(err) return callback(err, data);
// Variables stored
var commits = {};
var blames = [];
var info = null;
// Split up and parse the output
var lines = data.split("\n");
lines.forEach(function(line) {
var parts = line.split(/\s+/);
if(parts.length > 0) {
var part = parts[0];
// Process the part
if(part.match(/^[0-9A-Fa-f]{40}$/)) {
// Parse references to SHA keys
if(line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+) (\d+)$/)) {
var matches = line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+) (\d+)$/);
var id = matches[1];
var origin_line = matches[2];
var final_line = matches[3];
var group_lines = matches[4];
// Set id of the current reference
info = {id:id};
blames.push([null, []]);
} else if(line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+)$/)) {
var matches = line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+)$/);
var id = matches[1];
var origin_line = matches[2];
var final_line = matches[3];
// Set id of the current reference
info = {id:id};
}
} else if(part.match(/^(author|committer)/)) {
if(part.match(/^(.+)-mail$/)) {
info[part.match(/^(.+)-mail$/)[1] + "_email"] = parts[parts.length - 1];
} else if(part.match(/^(.+)-time$/)) {
info[part.match(/^(.+)-time$/)[1] + "_date"] = new Date(parseInt(parts[parts.length - 1]) * 1000);
} else if(part.match(/^(author|committer)$/)) {
info[part.match(/^(author|committer)$/)[1]] = parts.slice(1).join(" ");
}
} else if(part.match(/^filename/)) {
info['filename'] = parts[parts.length - 1];
} else if(part.match(/^summary/)) {
info['summary'] = parts.slice(1).join(" ").replace('\n', '');
} else if(part == '') {
var commit = commits[info["id"]];
// Create new commit
if(!commit) {
// commit = new Commit(repo, )
var id = info['id'];
var author = Actor.from_string(info['author'] + ' ' + info['author_email']);
var authored_date = info['author_date'];
var committer = Actor.from_string(info['committer'] + ' ' + info['committer_email']);
var committed_date = info['committer_date'];
var message = info['summary'];
// Create a new commit
commit = new Commit(repo, id, null, null, author, authored_date, committer, committed_date, message);
commits[info['id']] = commit;
}
// Break up the parts
parts = line.match(/^\t(.*)$/);
blames[blames.length - 1][0] = commit;
blames[blames.length - 1][1].push(parts[1]);
info = null;
}
}
});
// Call back with the list of blames
callback(null, blames);
});
}
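// Illustrative usage (sketch; repo, ref and path are assumed):
//
//   Blob.blame(repo, 'master', 'README', function(err, blames) {
//     // blames is an array of [commit, [line, ...]] pairs
//   });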

336
node_modules/git/lib/git/commit.js generated vendored Normal file
View File

@ -0,0 +1,336 @@
var util = require('util'),
Actor = require('./actor').Actor,
Diff = require('./diff').Diff,
Tree = require('./tree').Tree;
// Create a commit object
var Commit = exports.Commit = function(repo, id, parents, tree, author, authored_date, committer, committed_date, message, filechanges) {
var _repo = repo, _id = id, _parents = parents, _tree = tree, _author = author, _authored_date = authored_date;
var _committer = committer, _committed_date = committed_date, _id_abbrev = null, _filechanges = filechanges;
// Ensure we have an empty message at least
message = message ? message : [];
message = Array.isArray(message) ? message : [message];
var _message = message.join("\n");
// Extract short message
var message_lines_filtered = message.filter(function(line) {
return line.trim() == '' ? false : true;
})
var _short_message = message_lines_filtered.length > 0 ? message_lines_filtered[0] : '';
// Internal properties
Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "id", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "sha", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "parents", { get: function() {
_parents = lazy_reader(_repo, _id, 'parents', _parents);
return _parents;
}, set: function(value) { _parents = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "tree", { get: function() {
_tree = lazy_reader(_repo, _id, 'tree', _tree);
return _tree;
}, set: function(value) { _tree = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "author", { get: function() {
_author = lazy_reader(_repo, _id, 'author', _author);
return _author;
}, set: function(value) { _author = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "authored_date", { get: function() {
_authored_date = lazy_reader(_repo, _id, 'authored_date', _authored_date);
return _authored_date;
}, set: function(value) { _authored_date = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "committer", { get: function() {
_committer = lazy_reader(_repo, _id, 'committer', _committer);
return _committer;
}, set: function(value) { _committer = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "committed_date", { get: function() {
_committed_date = lazy_reader(_repo, _id, 'committed_date', _committed_date);
return _committed_date;
}, set: function(value) { _committed_date = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "message", { get: function() {
_message = lazy_reader(_repo, _id, 'message', _message);
return _message;
}, set: function(value) { _message = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "short_message", { get: function() {
_short_message = lazy_reader(_repo, _id, 'short_message', _short_message);
return _short_message;
}, set: function(value) { _short_message = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "filechanges", { get: function() {
_filechanges = lazy_reader(_repo, _id, 'filechanges', _filechanges);
return _filechanges;
}, set: function(value) { _filechanges = value; }, enumerable: true, configurable:true});
Object.defineProperty(this, "_id_abbrev", { get: function() { return _id_abbrev; }, set: function(value) { _id_abbrev = value; }, enumerable: true, configurable:true});
}
var lazy_reader = function(repo, id, name, variable) {
if(variable != null) return variable;
// Control the flow
var done = false;
var value = null;
// Fetch all the commits
Commit.find_all(repo, id, {max_count:1}, function(err, commits) {
if(err) return done = true;
value = commits[0][name];
done = true;
})
while(!done) {};
return value ? value : '';
}
// Load a commit
Commit.prototype.load = function(callback) {
var self = this;
Commit.find_all(this.repo, this.id, {max_count:1}, function(err, commits) {
if(err) return callback(err, commits);
var commit = commits[0];
Object.keys(commit).forEach(function(key) {
self[key] = commit[key];
});
callback(null, self);
});
}
// Chomp text removing end carriage returns
var chomp = function chomp(raw_text) {
return raw_text.replace(/(\n|\r)+$/, '');
}
// Fetch the short form of an id
Commit.prototype.id_abbrev = function(callback) {
var self = this;
if(this._id_abbrev) return callback(null, this._id_abbrev);
this.repo.git.rev_parse({}, this.id, 0, function(err, id) {
if(err) return callback(err, id);
self._id_abbrev = chomp(id).substr(0, 7);
callback(null, self._id_abbrev);
})
}
// Parse the actor and create the object
var actor = function(line) {
var results = line.match(/^.+? (.*) (\d+) .*$/);
var actor = results[1];
var epoch = results[2];
// Return the objects
return [Actor.from_string(actor), new Date(parseInt(epoch) * 1000)]
}
// Convert commit text to list of commits
Commit.list_from_string = function(repo, text) {
// Split up the result
var lines = text.split("\n");
// require('util').debug("-------------------------------------------------- lines")
// require('util').debug(require('util').inspect(lines))
// require('util').debug("-------------------------------------------------- text end")
var linesshift = function() {
return lines.shift();
};
var commits = [];
// Parse all commit messages
while(lines.length > 0) {
var id = linesshift().split(/ /).pop();
if(lines.length == 0) break;
var tree = new Tree(repo, linesshift().split(/ /).pop());
// Let's get the parents
var parents = [];
while(lines[0].match(/^parent/)) {
parents.push(new Commit(repo, linesshift().split(/ /).pop()))
}
// Let's get the author and committer
var actor_info = actor(linesshift());
var author = actor_info[0];
var authored_date = actor_info[1]
var committer_info = actor(linesshift());
var comitter = committer_info[0];
var committed_date = committer_info[1];
// Unpack encoding
var encoding = lines[0].match(/^encoding/) ? linesshift().split(/ /).pop() : '';
// Jump empty space
linesshift();
// Unpack message lines
var message_lines = [];
while(lines.length > 0 && lines[0].match(/^ {4}/)) {
var message_line = linesshift();
message_lines.push(message_line.substring(4, message_line.length)) ;
}
linesshift();
// Parse --raw lines
var filechanges = {};
var fcre = /:(\d+) (\d+) ([a-z0-9]+) ([a-z0-9]+) (\S+)\s+(.+)/;
var numre = /(\S+)\s+(\S+)\s+(.+)/;
var line;
var matched;
while (lines.length > 0) {
line = linesshift();
matched = line.match(fcre);
if (!matched) break;
var o = {};
var xs = ['a_mode', 'b_mode', 'a_blob', 'b_blob', 'what', 'path'];
for(var i = 0; i < xs.length; i++) {
o[xs[i]] = matched[i+1];
}
filechanges[o.path] = o;
}
while (line) {
matched = line.match(numre);
if (!matched) break;
var o = {};
var xs = ['plus', 'minus', 'path'];
for(var i = 0; i < xs.length; i++) {
o[xs[i]] = matched[i+1];
}
filechanges[o.path].plus = o.plus;
filechanges[o.path].minus = o.minus;
if (lines.length == 0) break;
line = linesshift();
}
if (!matched && line) lines = [line].concat(lines);
// Move and point to next message
while(lines[0] != null && lines[0] == '') linesshift();
// Create commit object
commits.push(new Commit(repo, id, parents, tree, author, authored_date, comitter, committed_date, message_lines, filechanges));
}
// Return all the commits
return commits;
}
// Locate all commits for a given set of parameters
Commit.find_all = function(repo, reference, options, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
callback = args.pop();
reference = args.length ? args.shift() : null;
options = args.length ? args.shift() : {};
// Merge the options with the default_options
if(!options.pretty) options['pretty'] = 'raw';
// If we have a reference use that for the lookup
if(!reference) options['all'] = true;
// Locate revisions
if(reference) {
repo.git.rev_list(options, reference, function(err, revision_output) {
if(err) return callback(err, []);
// Turn string into a list of revisions
callback(null, Commit.list_from_string(repo, revision_output));
});
} else {
repo.git.rev_list(options, function(err, revision_output) {
if(err) return callback(err, []);
// Turn string into a list of revisions
callback(null, Commit.list_from_string(repo, revision_output));
});
}
}
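// Illustrative usage (sketch; repo is assumed):
//
//   Commit.find_all(repo, 'master', {max_count: 10}, function(err, commits) {
//     // commits is an array of Commit objects parsed from `git rev-list --pretty=raw`
//   });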
// Return the count of commits for a given start
Commit.count = function(repo, ref, callback) {
repo.git.rev_list({}, ref, function(err, revision_output) {
if(err) return callback(err, revision_output);
callback(null, parseInt((revision_output.length/41)));
})
}
// Show diffs between two trees
// repo: the repo object
// a: named commit
// b: optional named commit, passing an array assumes you wish to omit the second
// named commit and limit the diff to the given paths
// paths: an array of paths to limit the diff.
//
// Returns array of diffs (baked)
Commit.diff = function(repo, a, b, paths, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 2);
callback = args.pop();
b = args.length ? args.shift() : null;
paths = args.length ? args.shift() : [];
// If b is an array we skipped the b parameter
if(Array.isArray(b)) {
paths = b;
b = null;
}
// Set up parameters correctly
if(paths.length > 0) {
if(paths.length > 0) paths.unshift("--");
if(b) paths.unshift(b);
paths.unshift(a);
// Let's execute the native git function
repo.git.call_git('', 'diff', '', {full_index:true}, paths, function(err, text) {
// Create a list of diffs from the string
if(text) {
Diff.list_from_string(repo, text, callback);
} else {
callback(null, []);
}
});
} else {
repo.git.diff(a, b, {full_index:true}, function(err, text) {
// Create a list of diffs from the string
if(text) {
Diff.list_from_string(repo, text, callback);
} else {
callback(null, []);
}
});
}
}
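// Illustrative usage (sketch; repo and refs are assumed):
//
//   Commit.diff(repo, 'v1.0', 'master', function(err, diffs) { /* ... */ });
//   Commit.diff(repo, 'master', ['lib/'], function(err, diffs) { /* ... */ });  // limit the diff to paths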
var process_diff = function(repo, diff, callback) {
if(diff.match(/diff --git a/)) {
diff = diff.substring(diff.match(/diff --git a/).index, diff.length);
} else {
diff = '';
}
// Return the diffs
Diff.list_from_string(repo, diff, callback);
}
// Show the commits
Commit.prototype.show = function(callback) {
var parents = this.parents;
var diff = null
var self = this;
if(parents.length > 1) {
this.repo.git.native_call("diff " + parents[0].id + "..." + parents[1].id, {full_index:true}, function(err, diff) {
if(err) return callback(err, diff);
process_diff(self.repo, diff, callback);
});
} else {
this.repo.git.show({full_index:true, pretty:'raw'}, this.id, function(err, diff) {
if(err) return callback(err, diff);
process_diff(self.repo, diff, callback);
});
}
}
// Return the diffs for a commit
Commit.prototype.diffs = function(callback) {
var parents = this.parents;
// If we have no parents
if(parents.length == 0) {
this.show(callback);
} else {
Commit.diff(this.repo, parents[0].id, this.id, callback)
}
}
// To String method
Commit.prototype.toString = function() {
return this.id;
}
// Convert commit into patch
Commit.prototype.toPatch = function(callback) {
this.repo.git.format_patch({'1':true, stdout:true}, this.id, callback);
}

115
node_modules/git/lib/git/commit_stats.js generated vendored Normal file
View File

@ -0,0 +1,115 @@
var CommitStats = exports.CommitStats = function(repo, id, files) {
var _repo = repo, _id = id, _files = files, _additions = 0, _deletions = 0, _total = 0;
// Build the stats based on the files
_additions = files.reduce(function(previousValue, currentValue, index, array) { return previousValue + currentValue[1]; }, 0);
_deletions = files.reduce(function(previousValue, currentValue, index, array) { return previousValue + currentValue[2]; }, 0);
_total = files.reduce(function(previousValue, currentValue, index, array) { return previousValue + currentValue[3]; }, 0);
// Internal properties
Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: false});
Object.defineProperty(this, "id", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true});
Object.defineProperty(this, "files", { get: function() { return _files; }, set: function(value) { _files = value; }, enumerable: true});
Object.defineProperty(this, "additions", { get: function() { return _additions; }, set: function(value) { _additions = value; }, enumerable: true});
Object.defineProperty(this, "deletions", { get: function() { return _deletions; }, set: function(value) { _deletions = value; }, enumerable: true});
Object.defineProperty(this, "total", { get: function() { return _total; }, set: function(value) { _total = value; }, enumerable: true});
}
// Find all commit stats matching the given criteria
// repo: the repo
// ref: the ref from which to begin (SHA1 or name) or nil for all
// options: hash of optional arguments to git
// max_count: maximum number of commits to fetch
// skip: number of commits to skip
//
// Returns assoc array (all values are lazy loading)
CommitStats.find_all = function(repo, reference, options, callback) {
var args = Array.prototype.slice.call(arguments, 2);
callback = args.pop();
var self = this;
// Unpack variables
options = args.length ? args.shift() : {};
// Set up options
options['numstat'] = true;
// Check if we have a reference
if(reference) {
// Execute log function
repo.git.log(options, reference, function(err, output) {
if(err) return callback(err, output);
callback(null, CommitStats.list_from_string(repo, output));
});
} else {
// Add all options
options['all'] = true;
// Execute log function
repo.git.log(options, function(err, output) {
if(err) return callback(err, output);
callback(null, CommitStats.list_from_string(repo, output));
});
}
}
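// Illustrative usage (sketch; repo is assumed):
//
//   CommitStats.find_all(repo, 'master', {max_count: 5}, function(err, stats) {
//     // stats is keyed by commit id; each value exposes files, additions, deletions and total
//   });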
// Parse out commit information into baked CommitStats objects
// repo: the repo
// text: the text output from the git command (raw format)
//
// Returns assoc array of baked CommitStats keyed by commit id
CommitStats.list_from_string = function(repo, text) {
var lines = text.trim().split('\n');
var commits = {};
while(lines.length > 0) {
// Fetch the commit id
var id = lines.shift().replace(/\ +/g, ' ').split(" ").pop();
// Remove some rows
lines.shift();
lines.shift();
lines.shift();
// Process message lines
var message_lines = [];
// Process out the messages
while(lines.length > 0 && (lines[0].match(/^ {4}/) || lines[0] == '')) {
var string = lines.shift().substr(4);
message_lines.push(string);
}
// Skip all empty lines
while(lines.length > 0 && lines[0] != null && lines[0] == '') lines.shift();
var files = [];
// Process all the files
while(lines.length > 0 && lines[0].match(/^([-\d]+)\s+([-\d]+)\s+(.+)/)) {
var parts = lines.shift().replace(/\ +/g, ' ').split(" ");
var additions = parseInt(parts[0]);
var deletions = parseInt(parts[1]);
var filename = parts[2];
var total = additions + deletions;
files.push([filename, additions, deletions, total]);
}
// Skip all empty lines
while(lines.length > 0 && lines[0] != null && lines[0] == '') lines.shift();
// Add the commit to the list
commits[id] = new CommitStats(repo, id, files);
}
// Return the commits
return commits;
}

48
node_modules/git/lib/git/config.js generated vendored Normal file
View File

@ -0,0 +1,48 @@
var Config = exports.Config = function(repo) {
var _repo = repo, _data = null;
Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _id = value; }, enumerable: false});
Object.defineProperty(this, "data", { get: function() {
_data = lazy_reader(_repo, 'data', _data);
return _data;
}, set: function(value) { _data = value; }, enumerable: true});
}
var lazy_reader = function(repo, name, variable) {
if(variable) return variable;
// Control the flow
var done = false;
var hash = {};
// Load the config and parse it
repo.git.config({list:true}, function(err, output) {
var lines = output.split("\n");
lines.forEach(function(line) {
var parts = line.split(/=/);
var key = parts.shift();
hash[key] = parts.join("=");
})
done = true;
})
while(!done) {};
return hash;
}
Config.prototype.fetch = function(key, default_value) {
var value = this.data[key];
if(!value) return default_value;
return this.data[key];
}
Config.prototype.set = function(key, value, callback) {
var self = this;
this.repo.git.config({}, key, value, function(err, output) {
if(err) return callback(err, output);
// Reset data variable
self.data = null;
// Return
callback(null, output);
});
}
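// Illustrative usage (sketch; repo is assumed):
//
//   var config = new Config(repo);
//   config.fetch('user.name', 'unknown');                    // value from `git config --list`, or the default
//   config.set('user.name', 'Jane Doe', function(err) {});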

82
node_modules/git/lib/git/diff.js generated vendored Normal file
View File

@ -0,0 +1,82 @@
var Blob = require('./blob').Blob;
var Diff = exports.Diff = function(repo, a_path, b_path, a_blob, b_blob, a_mode, b_mode, new_file, deleted_file, diff) {
var _repo = repo, _a_path = a_path, _b_path = b_path
var _a_mode = a_mode, _b_mode = b_mode, _diff = diff;
// Create blob objects
var _a_blob = !a_blob || a_blob.match(/^0{40}$/) ? null : new Blob(repo, a_blob);
var _b_blob = !b_blob || b_blob.match(/^0{40}$/) ? null : new Blob(repo, b_blob);
// Check if we have a new_file/deleted_file
var _new_file = new_file || _a_blob == null;
var _deleted_file = deleted_file || _b_blob == null;
Object.defineProperty(this, "repo", { get: function() { return _repo; }, enumerable: true});
Object.defineProperty(this, "a_path", { get: function() { return _a_path; }, enumerable: true});
Object.defineProperty(this, "b_path", { get: function() { return _b_path; }, enumerable: true});
Object.defineProperty(this, "a_mode", { get: function() { return _a_mode; }, enumerable: true});
Object.defineProperty(this, "b_mode", { get: function() { return _b_mode; }, enumerable: true});
Object.defineProperty(this, "diff", { get: function() { return _diff; }, enumerable: true});
Object.defineProperty(this, "a_blob", { get: function() { return _a_blob; }, enumerable: true});
Object.defineProperty(this, "b_blob", { get: function() { return _b_blob; }, enumerable: true});
Object.defineProperty(this, "new_file", { get: function() { return _new_file; }, enumerable: true});
Object.defineProperty(this, "deleted_file", { get: function() { return _deleted_file; }, enumerable: true});
}
// Create a list of diffs from a diff text
Diff.list_from_string = function(repo, text, callback) {
// Ensure we don't have white space at the end
text = text.trim();
// Split the text into lines
var lines = text.split("\n");
var diffs = [];
var a_path, b_path, a_mode, b_mode, new_file = false, deleted_file = false;
var a_blob, b_blob;
while(text.length > 0 && lines.length > 0) {
// Extract a line
var parts = lines.shift().match(/^diff --git a\/(.+?) b\/(.+)$/);
// Unpack parts
var a_path = parts[1];
var b_path = parts[2];
if(lines[0].match(/^old mode/)) {
a_mode = lines.shift().match(/^old mode (\d+)/)[1]
b_mode = lines.shift().match(/^new mode (\d+)/)[1]
}
if(lines.length == 0 || lines[0].match(/^diff --git/)) {
diffs.push(new Diff(repo, a_path, b_path, null, null, a_mode, b_mode, false, false, null));
} else {
if(lines[0].match(/^new file/)) {
b_mode = lines.shift().match(/^new file mode (.+)$/)[1];
a_mode = null;
new_file = true;
} else if(lines[0].match(/^deleted file/)) {
a_mode = lines.shift().match(/^deleted file mode (.+)$/)[1];
b_mode = null;
deleted_file = true;
}
// Unpack index reference
parts = lines.shift().match(/^index ([0-9A-Fa-f]+)\.\.([0-9A-Fa-f]+) ?(.+)?$/);
a_blob = parts[1];
b_blob = parts[2];
b_mode = parts[3];
// Contains all the diff lines
var diff_lines = [];
// Fetch all the diff lines
while(lines.length > 0 && !lines[0].match(/^diff/)) {
diff_lines.push(lines.shift());
}
// Join the difflines
var diff = diff_lines.join("\n");
// Add the diff to the list
diffs.push(new Diff(repo, a_path, b_path, a_blob, b_blob, a_mode, b_mode, new_file, deleted_file, diff));
}
}
// Return the list of diffs
callback(null, diffs);
}

241
node_modules/git/lib/git/file_index.js generated vendored Normal file
View File

@ -0,0 +1,241 @@
var util = require('util'),
fs = require('fs');
var FileIndex = exports.FileIndex = function(repo_path, callback) {
var _repo_path = repo_path;
var _index_file = repo_path + "/file-index";
var self = this;
// Set up internal index info
var _sha_count = 0, _commit_index = {}, _commit_order = {}, _all_files = {};
// Set up properties for instance
Object.defineProperty(this, "repo_path", { get: function() { return _repo_path; }, enumerable: true});
Object.defineProperty(this, "index_file", { get: function() { return _index_file; }, enumerable: true});
// Other values that allow setting
Object.defineProperty(this, "sha_count", { get: function() { return _sha_count; }, set: function(value) { _sha_count = value; }, enumerable: true});
Object.defineProperty(this, "commit_index", { get: function() { return _commit_index; }, set: function(value) { _commit_index = value; }, enumerable: true});
Object.defineProperty(this, "commit_order", { get: function() { return _commit_order; }, set: function(value) { _commit_order = value; }, enumerable: true});
Object.defineProperty(this, "all_files", { get: function() { return _all_files; }, set: function(value) { _all_files = value; }, enumerable: true});
fs.stat(_index_file, function(err, stat) {
if(err) return callback(err, stat);
if(stat.isFile() && stat.size < FileIndex.max_file_size) {
read_index(self, _index_file, function(err, _index) {
if(err) return callback(err, _index);
callback(null, _index);
})
} else {
callback("index file not found", null);
}
});
}
// Max size for file index
FileIndex.max_file_size = 10000000;
// Chomp text removing end carriage returns
var chomp = function chomp(raw_text) {
return raw_text.replace(/(\n|\r)+$/, '');
}
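// Return the directory portion of a file name, or '.' when the name has no directory component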
var dirname = function(file_name) {
var elements = file_name.split('/');
elements.pop();
if(elements.length == 0) return ".";
return elements.join("/");
}
// TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO
// TODO Needs to be async reading files in pieces and parsing them
// TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO
// Read and parse the file index for git
var read_index = function(file_index, _index_file, callback) {
var current_sha = null;
fs.readFile(_index_file, 'ascii', function(err, data) {
if(err) return callback(err, data);
// Split the text into lines
var lines = data.split("\n");
// Iterate over all the lines
for(var i = 0; i < lines.length; i++) {
var line = lines[i];
// Ensure it's a line with a starting sha
if(line.match(/^(\w{40})/)) {
// Unpack all the sha values (first one being the current_sha and the rest the parents)
var shas = line.match(/(\w{40})/g);
current_sha = shas.shift();
// The rest of the sha's are the parents
file_index.commit_index[current_sha] = {files:[], parents:shas}
file_index.commit_order[current_sha] = file_index.sha_count;
file_index.sha_count = file_index.sha_count + 1;
} else {
var file_name = chomp(line);
var tree = '';
// Retrieve the directory name for the file passed in
var dir = dirname(file_name);
// Ensure it's not an empty line
if(line.length > 0) {
// Split up the directory
var dir_parts = dir.split("/");
for(var j = 0; j < dir_parts.length; j++) {
var part = dir_parts[j];
if(dir_parts[j] != '.') {
tree = tree + part + '/'
if(file_index.all_files[tree] == null) file_index.all_files[tree] = [];
if(file_index.all_files[tree].indexOf(current_sha) == -1)
file_index.all_files[tree].unshift(current_sha);
}
}
// Finish up
if(!file_index.all_files[file_name]) file_index.all_files[file_name] = [];
file_index.all_files[file_name].unshift(current_sha);
file_index.commit_index[current_sha].files.push(file_name);
}
}
}
// Return the parsed index
callback(null, file_index);
});
}
// Builds a list of all commits reachable from a single commit
FileIndex.prototype.commits_from = function(commit_sha, callback) {
if(Array.isArray(commit_sha)) return callback("unsupported reference", null);
// Define some holding structures
var already = {};
var final = [];
var left_to_do = [commit_sha];
var self = this;
while(left_to_do.length > 0) {
commit_sha = left_to_do.shift();
if(!already[commit_sha]) {
// Add commit to list of final commits
final.push(commit_sha);
already[commit_sha] = true;
// Get parents of the commit and add them to the list
var commit = self.commit_index[commit_sha];
if(commit) {
commit.parents.forEach(function(sha) {
left_to_do.push(sha);
});
}
}
}
// Sort the commits
final = this.sort_commits(final);
// Callback
callback(null, final);
}
FileIndex.prototype.sort_commits = function(sha_array) {
var self = this;
return sha_array.sort(function(a, b) {
return compare(parseInt(self.commit_order[b]), parseInt(self.commit_order[a]));
})
}
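// Coerce a Date, [year, month, day] array, timestamp, date string or {year, month, date} object into a Date (NaN otherwise)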
var convert = function(d) {
return (
d.constructor === Date ? d :
d.constructor === Array ? new Date(d[0],d[1],d[2]) :
d.constructor === Number ? new Date(d) :
d.constructor === String ? new Date(d) :
typeof d === "object" ? new Date(d.year,d.month,d.date) :
NaN
);
}
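// Compare two date-like values after conversion, returning -1, 0 or 1 (NaN when either value is not a valid date)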
var compare = function(a,b) {
return (
isFinite(a=convert(a).valueOf()) &&
isFinite(b=convert(b).valueOf()) ?
(a>b)-(a<b) :
NaN
);
}
// Returns files changed at commit sha
FileIndex.prototype.files = function(commit_sha, callback) {
if(!this.commit_index[commit_sha]) return callback("no files found for sha: " + commit_sha, null);
callback(null, this.commit_index[commit_sha].files);
}
// Returns count of all commits
FileIndex.prototype.count_all = function(callback) {
callback(null, this.sha_count);
}
// returns count of all commits reachable from SHA
FileIndex.prototype.count = function(commit_sha, callback) {
this.commits_from(commit_sha, function(err, commits) {
if(err) return callback(err, commits);
callback(null, commits.length);
})
}
// returns all commits for a provided file
FileIndex.prototype.commits_for = function(file, callback) {
if(!this.all_files[file]) return callback("could not locate any commits for file: " + file, null);
callback(null, this.all_files[file])
}
// returns the shas of the last commits for all
// the files in [] from commit_sha
// files_matcher can be a regexp or an array
FileIndex.prototype.last_commits = function(commit_sha, files_matcher, callback) {
var self = this;
this.commits_from(commit_sha, function(err, acceptable) {
if(err) return callback(err, acceptable);
var matches = {};
if(files_matcher.constructor == RegExp) {
// Filter all the files by the matching regular expression
files_matcher = Object.keys(self.all_files).filter(function(file) {
return file.match(files_matcher);
});
}
if(Array.isArray(files_matcher)) {
// Locate the last commit for each file in the files_matcher array
for(var files_matcher_index = 0; files_matcher_index < files_matcher.length; files_matcher_index++) {
var files = self.all_files[files_matcher[files_matcher_index]];
for(var files_index = 0; files_index < files.length; files_index++) {
// If the file is included in the list of commits_from then add it to the matches
if(acceptable.indexOf(files[files_index]) != -1) {
matches[files_matcher[files_matcher_index]] = files[files_index];
break;
}
}
}
}
// Return matches
callback(null, matches);
});
}

66
node_modules/git/lib/git/file_window.js generated vendored Normal file
View File

@ -0,0 +1,66 @@
var util = require('util'),
fs = require('fs'),
Buffer = require('buffer').Buffer;
var FileWindow = exports.FileWindow = function(idxfile, version) {
var _idxfile = idxfile, _version = version, _global_offset, _offset = null, _seek_offset = 0;
// Set file global offset
_global_offset = _version == 2 ? 8 : 0;
//Internal properties
Object.defineProperty(this, "idxfile", { get: function() { return _idxfile; }, set: function(value) { _idxfile = value; }, enumerable: true});
Object.defineProperty(this, "version", { get: function() { return _version; }, enumerable: true});
Object.defineProperty(this, "global_offset", { get: function() { return _global_offset; }, enumerable: true});
Object.defineProperty(this, "offset", { get: function() { return _offset; }, set: function(value) { _offset = value; }, enumerable: true});
Object.defineProperty(this, "seek_offset", { get: function() { return _seek_offset; }, set: function(value) { _seek_offset = value; }, enumerable: true});
}
FileWindow.prototype.unmap = function() {
this.idxfile = null;
}
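// Read data from the idx file; idx is a single offset or an [offset, length] pair, returned as a Buffer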
FileWindow.prototype.index = function(idx) {
var offset = null, len = null, seek_offset = null;
// open the file
var idx_handle = fs.openSync(this.idxfile, "r");
if(idx.length == 1) idx = idx[0];
// Number support
if(idx.constructor == Number && idx === parseInt(idx, 10)) {
offset = idx;
len = null;
} else if(Array.isArray(idx)) {
offset = idx[0];
len = idx[1]
} else {
throw "invalid index param: " + util.inspect(idx);
}
// Seek position equivalent using a position in the read
if(this.offset != offset) {
this.seek_offset = offset + this.global_offset;
}
// Adjust the stored offset
this.offset = (offset + len) ? len : 1;
if(!len) len = 1;
// Read the offset value
var buffer = new Buffer(len);
if(len) {
fs.readSync(idx_handle, buffer, 0, len, this.seek_offset);
} else {
fs.readSync(idx_handle, buffer, 0, 1, this.seek_offset);
}
// Update seek_offset
this.seek_offset = this.seek_offset + len;
// Close the file don't keep file handles around
fs.closeSync(idx_handle);
return buffer;
}
FileWindow.prototype.close = function() {
// fs.closeSync(this.idxfile);
this.unmap();
}

891
node_modules/git/lib/git/git.js generated vendored Normal file
View File

@ -0,0 +1,891 @@
var util = require('util'),
fs = require('fs'),
GitFileOperations = require('./git_file_operations').GitFileOperations,
exec = require('child_process').exec,
FileIndex = require('./file_index').FileIndex,
Repository = require('./repository').Repository,
Difference = require('../diff/diff').Difference;
var Git = exports.Git = function(git_directory) {
var _git_directory = git_directory, _git_file_index;
var _repository = new Repository(_git_directory, {});
// Control access to internal variables
Object.defineProperty(this, "git_directory", { get: function() { return _git_directory; }, set: function(value) { _git_directory = value; }, enumerable: true});
Object.defineProperty(this, "git_file_index", { get: function() { return _git_file_index; }, set: function(value) { _git_file_index = value; }, enumerable: true});
Object.defineProperty(this, "repository", { get: function() { return _repository; }, set: function(value) { _repository = value; }, enumerable: true});
}
// Set up the gitbinary
if(process.platform.toLowerCase().match(/mswin(?!ce)|mingw|bccwin|win32/)) {
Git.git_binary = "git";
} else {
Git.git_binary = "/usr/bin/env git";
}
// Chomp text removing end carriage returns
var chomp = function chomp(raw_text) {
return raw_text ? raw_text.replace(/(\n|\r)+$/, '') : '';
}
var read_file = function(path, callback) {
fs.stat(path, function(err, stat) {
if(err) return callback(err, null);
fs.readFile(path, 'ascii', callback);
})
}
// Retrieve references
Git.prototype.refs = function(options, prefix, callback) {
var refs = [];
var already = {};
var self = this;
// Locate all files in underlying directories
var stream = GitFileOperations.glob_streaming(this.git_directory + "/" + prefix);
// Triggers on each entry in the directory
stream.addListener("data", function(result) {
// If we have a directory check if we have a reference file
if(result.stat.isFile()) {
// Read the file content
try {
var id = chomp(fs.readFileSync(result.path, 'ascii'));
var name = result.path.replace(self.git_directory + "/" + prefix + "/", '');
if(!already[name]) {
refs.push(name + " " + id);
already[name] = true;
}
} catch(err) {
// In some instances a directory is misreported as a file; ignore the resulting read error
}
}
});
// Triggers at the end of the call
stream.addListener("end", function(err, result) {
fs.stat(self.git_directory + "/packed-refs", function(err, stat) {
if(err || !stat.isFile()) return callback(null, refs.join("\n"));
read_file(self.git_directory + "/packed-refs", function(err, data) {
var parts = data.split(/\n/);
// Scan all lines
for(var i = 0; i < parts.length; i++) {
var match = parts[i].match(/^(\w{40}) (.*?)$/)
if(match) {
if(match[2].match("^" + prefix)) {
var id = chomp(match[1]);
var name = match[2].replace(prefix + "/", '');
if(!already[name]) {
refs.push(name + " " + id);
already[name] = true;
}
}
}
}
// Return all the references
callback(null, refs.join("\n"));
});
})
})
}
// Read a specific file
Git.prototype.fs_read = function(file, callback) {
GitFileOperations.fs_read(this.git_directory, file, callback);
}
// // Parse revisions
// Git.prototype.rev_parse = function(options, string, callback) {
// if(string == null || string.constructor != String) return callback("invalid string: " + string);
// var self = this;
//
// // Make sure we don't have a directory up ..
// if(string.match(/\.\./)) {
// var shas = string.split(/\.\./);
// var sha1 = shas[0], sha2 = shas[1];
// // Need to rev_parse the two keys and return the data
// new Simplifier().execute(new ParallelFlow(
// function(callback) { self.rev_parse({}, sha1, callback); },
// function(callback) { self.rev_parse({}, sha2, callback); }
// ), function(sha1_results, sha2_results) {
// // Return the collected files
// return callback(null, [sha1_results[1], sha2_results[1]]);
// });
// }
//
// // If we have a sha being returned nop it
// if(string.match(/^[0-9a-f]{40}$/)) {
// return callback(null, chomp(string));
// }
//
// // Check in heads directory
// read_file(self.git_directory + "/refs/heads/" + string, function(err, data) {
// if(!err) return fs.readFile(self.git_directory + "/refs/heads/" + string, function(err, data) { callback(err, chomp(data)); });
// // If not in heads then check in remotes
// read_file(self.git_directory + "/refs/remotes/" + string, function(err, data) {
// if(!err) return fs.readFile(self.git_directory + "/refs/remotes/" + string, function(err, data) { callback(err, chomp(data)); });
// // If not in remotes check in tags
// read_file(self.git_directory + "/refs/tags/" + string, function(err, data) {
// if(!err) return fs.readFile(self.git_directory + "/refs/tags/" + string, function(err, data) { callback(err, chomp(data)); });
//
// // Not pin any of the main refs, look in packed packed-refs
// read_file(self.git_directory + "/packed-refs", function(err, data) {
// if(err) return callback(err, data);
// // Split the data on new line
// var ref = null;
// var parts = data.split(/\n/);
// // Locate head
// for(var i = 0; i < parts.length; i++) {
// var match_parts = parts[i].match(/^(\w{40}) refs\/.+?\/(.*?)$/);
// if(match_parts) {
// ref = match_parts[1];
// // If we have a match fetch reference and return
// if(new RegExp(string + '$').test(match_parts[3])) {
// break;
// }
// }
// }
//
// // If we have a reference lets terminate
// if(ref) return callback(null, ref);
//
// // !! more partials and such !!
//
//
// // revert to calling git
// self.call_git('', 'rev-parse', '', options, string, function(err, result) {
// result = result ? chomp(result) : result;
// callback(err, result);
// })
// });
// });
// });
// });
// }
Git.prototype.transform_options = function(options) {
var args = [];
var keys = Object.keys(options);
// Process all entries
Object.keys(options).forEach(function(key) {
if(key.length == 1) {
if(options[key] == true && options[key].constructor == Boolean) { args.push('-' + key);
} else if(options[key] == false && options[key].constructor == Boolean) {
} else { args.push('-' + key + ' "' + options[key].toString().replace('"', "\\\"") + '"'); }
} else {
if(options[key] == true && options[key].constructor == Boolean) { args.push("--" + key.toString().replace(/_/, '-'));
} else if(options[key] == false && options[key].constructor == Boolean) {
} else { args.push('--' + key.toString().replace(/_/, '-') + '="' + options[key].toString().replace('"', "\\\"") + '"'); }
}
});
// Return the formatted parameters
return args;
}
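// Example: transform_options({p:true, s:false, max_count:10}) yields ['-p', '--max-count="10"']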
Git.prototype.git = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
// Unpack the variables
var function_name = args.length ? args.shift() : null;
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', function_name, '', options, arguments, function(err, result) {
callback(err, result);
});
}
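// Escape double quotes and semicolons so a value can be embedded in the shell command string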
var shell_escape = function(str) {
return str.toString().replace('"', "\\\"").replace(/\;/g, "\\;");
}
// Call the native git binary
Git.prototype.call_git = function(prefix, command, postfix, options, args, callback) {
// Do we have a timeout
var timeout = options['timeout'] ? options['timeout'] : 1000 * 60;
var call_string = '';
// Remove the timeout property if we have one
if(options['timeout']) delete options['timeout'];
var option_arguments = this.transform_options(options);
if(process.platform.toLowerCase().match(/mswin(?!ce)|mingw|bccwin/)) {
} else {
// Map the extra parameters
var ext_args = args.map(function(arg) { return (arg == '--' || arg.substr(0, 1) == '|' ? arg : ('"' + shell_escape(arg) + '"'))})
.filter(function(arg) { return arg == null || arg == '' ? false : true});
// Join the arguments
var final_arguments = option_arguments.concat(ext_args);
// Build a call
call_string = prefix + Git.git_binary + ' --git-dir="'+ this.git_directory + '" ' + command.toString().replace(/_/, '-') + ' ' + final_arguments.join(" ") + postfix;
}
// Execute the function
execute_git_call(call_string, { encoding: 'utf8', timeout: timeout, killSignal: 'SIGKILL'}, callback);
}
var execute_git_call = function(call_string, options, callback) {
// Execute the git command
options.maxBuffer = 1024 * 1024;
exec(call_string, options,
function (error, stdout, stderr) {
if (error != null) {
var result = error.toString();
callback(result != null ? result.trim() : result, null);
} else {
var result = stdout.toString();
callback(null, result != null ? result.trim() : result)
}
});
}
var file_index = function(git, callback) {
// If we have a file index object return it otherwise create a new one
if(!git.git_file_index) {
new FileIndex(git.git_directory, function(err, _file_index) {
git.git_file_index = _file_index;
callback(null, _file_index);
});
} else {
callback(null, git.git_file_index);
}
}
// Fetch a revision list
Git.prototype.rev_list = function(options, reference, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
options = args.length ? args.shift() : {};
reference = args.length ? args.shift() : 'master';
// Remove skip option if it's set to 0
if(options['skip'] != null && parseInt(options['skip']) == 0) delete options['skip'];
var allowed_options = {"max_count":1, "since":1, "until":1, "pretty":1};
var establish_keys = Object.keys(options).filter(function(key) {
return allowed_options[key] ? false : true;
});
// If we have commands we don't support call through to native git
if(establish_keys.length > 0) {
self.call_git('', 'rev_list', '', options, [reference], function(err, result) {
callback(err, result);
})
} else if(Object.keys(options).length == 0){
// Fetch the file index (will create a file index on the first call)
file_index(self, function(err, _file_index) {
if(err) return callback(err, _file_index);
// Parse the revision
self.rev_parse({}, reference, 0, function(err, ref) {
if(err) return callback(err, ref);
// Fetch the commits from the revision passed in
_file_index.commits_from(ref, function(err, commits) {
if(err) {
self.call_git('', 'rev_list', '', options, [reference], function(err, result) {
callback(err, result);
})
} else {
callback(null, commits.join("\n") + "\n");
}
})
});
})
} else {
self.rev_parse({}, reference, 0, function(err, ref) {
if(err) return callback(err, ref);
if(Array.isArray(ref)) {
self.call_git('', 'rev_list', '', options, [reference], function(err, result) {
callback(err, result);
})
} else {
try {
// Try to execute revision fetch
self.repository.rev_list(ref, options, function(err, result) {
callback(err, result);
})
} catch(err) {
callback(err, null);
}
}
});
}
}
// Chomp text removing end carriage returns
var chomp = function chomp(raw_text) {
return raw_text.replace(/(\n|\r)+$/, '');
}
Git.prototype.rev_parse = function(options, string, level, callback) {
if(string != null && string.constructor != String) return callback('only supports single sha reference');
var self = this;
// Allow leaving of level
var args = Array.prototype.slice.call(arguments, 2);
var callback = args.pop();
level = args.length ? args.shift() : 0;
if(string.match(/\.\./)) {
var parts = string.split("..");
var sha1 = parts[0], sha2 = parts[1];
var value = [this.rev_parse({}, sha1, level + 1, callback), this.rev_parse({}, sha2, level + 1, callback)];
if(level == 0) return callback(null, value);
}
// a sha is being passed in, chomp and return
if(string.match(/^[0-9a-f]{40}$/)) {
var value = chomp(string);
if(level == 0) {
return callback(null, value);
} else {
return value;
}
}
// Check all the references
var head = this.git_directory + "/refs/heads/" + string;
try {
if(level == 0) {
return callback(null, chomp(fs.readFileSync(head, 'utf8')));
} else {
return chomp(fs.readFileSync(head, 'utf8'));
}
} catch(err) {}
var head = this.git_directory + "/refs/remotes/" + string;
try {
if(level == 0) {
return callback(null, chomp(fs.readFileSync(head, 'utf8')));
} else {
return chomp(fs.readFileSync(head, 'utf8'));
}
} catch(err) {}
var head = this.git_directory + "/refs/tags/" + string;
try {
if(level == 0) {
return callback(null, chomp(fs.readFileSync(head, 'utf8')));
} else {
return chomp(fs.readFileSync(head, 'utf8'));
}
} catch(err) {}
// Check packed-refs file, too
var packref = this.git_directory + "/packed-refs";
try {
// Read the file
var data = fs.readFileSync(packref, 'utf8');
var parts = data.split(/\n/);
// Locate head
for(var i = 0; i < parts.length; i++) {
var match_parts = parts[i].match(/^(\w{40}) refs\/.+?\/(.*?)$/);
if(match_parts) {
var ref = match_parts[1];
// If we have a match fetch reference and return
if(new RegExp(string + '$').test(match_parts[2])) {
if(level == 0) {
return callback(null, chomp(ref));
} else {
return chomp(ref);
}
}
}
}
} catch(err) {}
// Wait until we got the git call
self.call_git('', 'rev-parse', '', options, [string], function(err, result) {
callback(null, result ? chomp(result) : result);
})
}
// List tree content
Git.prototype.ls_tree = function(treeish, paths, options, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
var callback = args.pop();
paths = args.length ? args.shift() : [];
paths = paths ? paths : [];
options = args.length ? args.shift() : {};
try {
// Reverse parse the tree sha
this.rev_parse({}, treeish, function(err, sha) {
if(err) return callback(err, sha);
var tree = self.repository.ls_tree(sha, flatten(paths), options['r']);
if(tree == '') return callback('no such sha found', null);
// Ls_tree
callback(null, tree);
})
} catch(err) {
callback(err, null);
}
}
// Cat a file
Git.prototype.cat_file = function(type, ref, callback) {
if(type == "t") {
this.file_type(ref, callback);
} else if(type == "s") {
this.file_size(ref, callback);
} else if(type == "p") {
callback(null, this.repository.cat_file(ref));
}
}
Git.prototype.file_size = function(ref, callback) {
callback(null, this.repository.cat_file_size(ref));
}
// Make a directory
// dir: is the relative path to the directory to create
//
// Return nothing
Git.prototype.fs_mkdir = function(dir, callback) {
var path = this.git_directory + "/" + dir;
GitFileOperations.fs_mkdir(path, callback);
}
// Initialize a new git repository (create physical setup)
Git.prototype.init = function(options, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 2);
if(Object.keys(options).length == 0) {
Repository.init(this.git_directory, callback);
} else {
// Execute init with call git and return the object
this.call_git('', 'init', '', options, args, function(err, result) {
if(err) return callback(err, result);
callback(null, self);
});
}
}
// Clone a directory
Git.prototype.clone = function(options, original_path, target_path, callback) {
}
// Generate diff from the changes between two shas
// Git.prototype.diff = function(options, sha1, sha2, callback) {
// }
//
// var simple_diff = function(repo, options, sha1, sha2, callback) {
//
// }
//
// var native_diff = function(repo, options, sha1, sha2, base, paths, callback) {
//
// }
// Flatten a nested array by one level (adapted from an external snippet)
var flatten = function(array) {
return array.reduce(function(a,b) {
return a.concat(b);
}, []);
}
Git.prototype.diff = function(commit1, commit2, options, callback) {
try {
var self = this;
var args = Array.prototype.slice.call(arguments, 2);
// Pop the callback
var callback = args.pop();
options = args.length ? args.shift() : {};
// Initialize patch variable
var patch = '', commit_obj1 = null, tree1 = null, tree2 = null;
// Retrieve the first commit object
var commit_obj1 = self.repository.get_object_by_sha1(commit1);
var tree1 = commit_obj1.tree;
if(commit2) {
tree2 = self.repository.get_object_by_sha1(commit2).tree;
} else {
tree2 = self.repository.get_object_by_sha1(commit_obj1.parent[0]).tree;
}
var qdiff = self.repository.quick_diff(tree1, tree2).sort();
qdiff.forEach(function(diff_arr) {
// Set up all the variables
var path = diff_arr[0];
var status = diff_arr[1];
var treeSHA1 = diff_arr[2];
var treeSHA2 = diff_arr[3];
var format = 'unified';
var lines = 3;
var output = '';
var file_length_difference = 0;
// Fetch the files
var fileA = treeSHA1 ? self.repository.cat_file(treeSHA1) : '';
var fileB = treeSHA2 ? self.repository.cat_file(treeSHA2) : '';
// Get the sha's or set empty shas
var sha1 = treeSHA1 || '0000000000000000000000000000000000000000';
var sha2 = treeSHA2 || '0000000000000000000000000000000000000000';
// Split up data
var data_old = fileA.trim().split(/\n/).map(function(e) { return chomp(e); });
var data_new = fileB.trim().split(/\n/).map(function(e) { return chomp(e); });
// Javascript splits a file into [''] if it's an empty file
if(data_old.length == 1 && data_old[0] == '') data_old = [];
if(data_new.length == 1 && data_new[0] == '') data_new = [];
// Get diffs
var diffs = Difference.LCS.diff(data_old, data_new);
if(diffs.length > 0) {
// Create paths
var a_path = "a/" + path.replace(/\.\//g, '');
var b_path = "b/" + path.replace(/\.\//g, '');
// Let's create the header
var header = "diff --git " + a_path + " " + b_path;
if(options['full_index']) {
header = header + '\n' + 'index ' + sha1 + '..' + sha2;
if(treeSHA2) header = header + ' 100644';
} else {
header = header + '\n' + 'index ' + sha1.substr(0, 7) + '..' + sha2.substr(0, 7);
if(treeSHA2) header = header + ' 100644';
}
header = header + '\n--- ' + (treeSHA1 ? a_path : '/dev/null');
header = header + '\n+++ ' + (treeSHA2 ? b_path : '/dev/null');
header = header + '\n';
// standard hunk
var old_hunk = null, hunk = null;
// Process all the diff changes
diffs.forEach(function(piece) {
try {
hunk = new Difference.LCS.Hunk(data_old, data_new, piece, lines, file_length_difference);
file_length_difference = hunk.file_length_difference;
if(old_hunk) {
if(lines > 0 && hunk.overlaps(old_hunk)) {
hunk.unshift(old_hunk);
} else {
output = output + old_hunk.diff(format);
}
}
} catch(err) {}
old_hunk = hunk;
output = output + '\n';
});
// Prepare next
output = output + old_hunk.diff(format);
output = output + '\n';
patch = patch + header + output.trimLeft();
}
});
// Return the patch
callback(null, patch);
} catch(err) {
callback('tree was bad or lcs is not working', null);
}
}
// Check if a file exists
Git.prototype.fs_exist = function(path, callback) {
GitFileOperations.fs_exist(this.git_directory, path, callback);
}
// Write a normal file to the filesystem
// file: relative path from the Git dir
// contents: String content to be written
//
// Return nothing
Git.prototype.fs_write = function(file, content, callback) {
GitFileOperations.fs_write(this.git_directory, file, content, callback);
}
// Log function, returns the number of logs
Git.prototype.log = function(commit, path, options, callback) {
var args = ['--raw', '--no-abbrev', '--numstat'];
if (path) {
args.push('--');
args.push(path);
}
options.color = 'never';
this.call_git('', 'log', '', options, args, callback);
}
// Select the objects that exists
// object_ids: array of object sha's
//
// Returns array of ids's that exist
Git.prototype.select_existing_objects = function(object_ids, callback) {
var self = this;
var existing_object_ids = [];
var pending = object_ids.length;
if(pending == 0) return callback(null, existing_object_ids);
// Process all the object ids
object_ids.forEach(function(object_id) {
// Check if the object_id exists in the db
self.repository.object_exists(object_id, function(err, result) {
if(err) return callback(err, result);
if(result) existing_object_ids.push(object_id);
// Return all the existing objects once every check has completed
if(--pending == 0) callback(null, existing_object_ids);
});
});
}
// Format the patch
Git.prototype.format_patch = function(options, reference, callback) {
this.call_git('', 'format_patch', '', options, [reference], function(err, result) {
callback(err, result);
})
}
// Fetch the blame
Git.prototype.blame = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'blame', '', options, arguments, function(err, result) {
callback(err, result);
});
}
var clean_paths = function(commits) {
var new_commits = {};
// Iterate over all the commit hash entries and clean the directory names
Object.keys(commits).forEach(function(file) {
var sha = commits[file];
file = file.substr(file.length - 1, 1) == '/' ? file.substr(0, file.length - 1) : file;
new_commits[file] = sha;
})
// Return all the cleaned commits
return new_commits;
}
// Fetch blame tree
Git.prototype.blame_tree = function(commit, path, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
var callback = args.pop();
path = args.length ? args.shift() : null;
// Create path
path = path != null && path != '' ? [path].join("/").toString() + '/' : path;
path = !path || path.constructor != String ? '' : path;
// Fetch the file_index
file_index(this, function(err, file_index_instance) {
if(err) return callback(err, file_index_instance);
self.rev_parse({}, commit, 0, function(err, rev_parse_output) {
if(err) return callback(err, rev_parse_output);
self.looking_for(commit, path, function(err, looking_for) {
if(err) return callback(err, looking_for);
file_index_instance.last_commits(rev_parse_output, looking_for, function(err, commits) {
if(err) return callback(err, commits);
callback(null, clean_paths(commits));
});
});
});
});
}
// Looking for
Git.prototype.looking_for = function(commit, path, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
var callback = args.pop();
path = args.length ? args.shift() : null;
var file = null;
// Fetch the commit sha
self.rev_parse({}, commit, 0, function(err, rev_parse_output) {
if(err) return callback(err, rev_parse_output);
// Fetch the sub tree
self.repository.get_subtree(rev_parse_output, path, function(err, tree_sha) {
if(err) return callback(err, tree_sha);
// Contains the files
var looking_for = [];
// Fetch and return the object by the tree sha
var object = self.repository.get_object_by_sha1(tree_sha);
// Process all the entries for the object
object.entries.forEach(function(entry) {
file = path && !(path == '' || path == '.' || path == './') ? path + "/" + entry.name : entry.name;
// Ensure no double path characters
file = file.replace('//', '/');
// Add a slash if it's a directory
if(entry.type == 'directory') file = file + "/";
// Add to list of looking_for entries
looking_for.push(file);
});
// Return the entries
return callback(null, looking_for);
});
});
}
// Perform commit
Git.prototype.commit = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'commit', '', options, arguments, function(err, result) {
callback(err, result);
});
}
// Fetch config
Git.prototype.config = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'config', '', options, arguments, function(err, result) {
callback(err, result);
});
}
// Execute add command
Git.prototype.add = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'add', '', options, arguments, function(err, result) {
callback(err, result);
});
}
// Execute remove command
Git.prototype.remove = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'rm', '', options, arguments, function(err, result) {
callback(err, result);
});
}
// Execute ls-files
Git.prototype.ls_files = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'ls-files', '', options, arguments, function(err, result) {
callback(err, result);
});
}
// Execute diff-files
Git.prototype.diff_files = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'diff-files', '', options, arguments, function(err, result) {
callback(err, result);
});
}
// Execute diff-index
Git.prototype.diff_index = function() {
// Unpack parameters as commit might be null
var args = Array.prototype.slice.call(arguments, 0);
var callback = args.pop();
var options = args.length ? args.shift() : {};
var arguments = args;
// Execute blame command
this.call_git('', 'diff-index', '', options, arguments, function(err, result) {
callback(err, result);
});
}
Git.prototype.file_type = function(ref, callback) {
return callback(null, this.repository.cat_file_type(ref));
}
Git.prototype.put_raw_object = function(content, type, callback) {
return this.repository.put_raw_object(content, type, callback);
}
Git.prototype.commit_from_sha = function(id) {
var repository = new Repository(this.git_directory);
var object = repository.get_object_by_sha1(id);
if(object.type == "commit") {
return id;
} else if(object.type == "tag") {
return object.object;
} else {
return '';
}
}
// // ===================================================================================================
// //
// // Decorates the Class prototype with functions wrapping git native functions (if not defined already)
// //
// // ===================================================================================================
// Git.prototype.call_git('', 'help', '', {}, ['--all'], function(err, result) {
// var index = result.indexOf("-----------");
// result = result.substr(index);
// var lines = result.trim().split("\n");
// // Ship the first line
// lines.shift();
// // Process all the lines
// while(lines.length > 0 && lines[0] != '') {
// var line = lines.shift().trim().replace(/ +/g, ' ');
// var parts = line.split(" ");
//
// parts.forEach(function(command) {
// var function_name = command.replace(/\-/g, '_');
// // For each entry create a new function if it does not exist on the prototype
// if(Git.prototype[function_name] == null) {
// Git.prototype[function_name] = function() {
// // Unpack parameters as commit might be null
// var args = Array.prototype.slice.call(arguments, 0);
// callback = args.pop();
// var options = args.length ? args.shift() : {};
// var arguments = args;
// // Execute blame command
// this.call_git('', command, '', options, arguments, function(err, result) {
// callback(err, result);
// });
// }
// }
// });
//
// }
//
// // callback(null, null);
// pre_loading_done = true
// // var g = new Git("..../")
// });

170
node_modules/git/lib/git/git_file_operations.js generated vendored Normal file
View File

@ -0,0 +1,170 @@
var util = require('util'),
fs = require('fs'),
exec = require('child_process').exec;
var GitFileOperations = exports.GitFileOperations = function() {}
// Streaming glob function
var streaming_glob_function = function(path, stream) {
var entries = fs.readdirSync(path);
entries.forEach(function(entry) {
var entry_path = path + "/" + entry;
var stat = fs.statSync(entry_path);
if(stat.isDirectory()) {
stream.emit("data", {path:entry_path, stat:stat});
streaming_glob_function(entry_path, stream);
} else {
stream.emit("data", {path:entry_path, stat:stat});
}
})
}
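// Return a function that stats entry_path, emits a data event for it, recurses into directories and then invokes callback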
var stat_with_entry = function(entry_path, stream, callback) {
return function() {
fs.stat(entry_path, function(err, stat) {
if(stat.isDirectory()) {
// Dive into the directory
streaming_glob_function(entry_path, stream);
// Emit the directory and then update the count
stream.emit("data", {path:entry_path, stat:stat});
callback();
} else if(stat.isFile()) {
// Update the number of processed directories and emit the data event
stream.emit("data", {path:entry_path, stat:stat});
callback();
}
});
}
}
// Glob function for the file system
GitFileOperations.glob_streaming = function(path) {
// Compatibility: prefer setImmediate when it is available
var nextTick = global.setImmediate || process.nextTick;
// Create a stream object
var stream = new (require('events').EventEmitter)();
var processed_directories_count = 0;
var top_level_files_count = -1;
// Tick method
var tick_function = function() {
// If we are done emit end otherwise execute the method again
processed_directories_count == top_level_files_count ? stream.emit("end") : nextTick(tick_function);
}
// set nextTick handler into action
nextTick(tick_function);
// Fetch the top directory
fs.readdir(path, function(err, entries) {
// The top level of files that need to be finished processing for us to be done
if(entries !== undefined && entries.length > 0) {
entries.sort(function(a, b) {
return a > b;
});
top_level_files_count = entries.length;
// Execute the entries
var processEntry = function(i) {
// Entry path
var entry_path = path + "/" + entries[i];
// Build glob function
stat_with_entry(entry_path, stream, function() {
processed_directories_count = processed_directories_count + 1;
if (++i < entries.length) {
processEntry(i);
}
})();
};
processEntry(0);
} else {
top_level_files_count = 0;
}
});
// Return the stream for execution
return stream;
}
// Execute recursive glob function (private function)
var glob_function = function(path, files) {
var entries = fs.readdirSync(path);
entries.forEach(function(entry) {
var entry_path = path + "/" + entry;
var stat = fs.statSync(entry_path);
if(stat.isDirectory()) {
glob_function(entry_path, files);
} else {
files.push(entry_path);
}
})
}
// Glob function for the file system
GitFileOperations.glob = function(path, files, callback) {
var args = Array.prototype.slice.call(arguments, 1);
callback = args.pop();
files = args.length ? args.shift() : [];
// Fetch all the files
glob_function(path, files);
callback(null, files);
}
// Read a file
GitFileOperations.fs_read = function(path, file, callback) {
fs.readFile(path + "/" + file, callback);
}
// Make a directory
GitFileOperations.fs_mkdir = function(dir, callback) {
fs.mkdir(dir, 16877, callback);
}
// Check if a directory exists
GitFileOperations.fs_exist = function(dir, path, callback) {
// Existence check via stat (implementation assumed from the documented intent of the callers)
fs.stat(dir + "/" + path, function(err, stat) { callback(null, err ? false : true); });
}
// Delete directory
GitFileOperations.fs_rmdir_r = function(dir, callback) {
// Copy the old directory to the new one
var child = exec('rm -rf ' + dir, function (error, stdout, stderr) {
if (error !== null) {
util.puts('exec error: ' + error);
return callback(error, null);
}
return callback(null, null);
});
}
// Write file
GitFileOperations.fs_write = function(dir, file, content, callback) {
// Let's make sure the parent directories exist, split the file into directories and content
var file_parts = file.split("/");
var file_name = file_parts.pop()
var current_path = dir;
// Create missing sub directories
while(file_parts.length > 0) {
var dir_path = file_parts.shift();
current_path = current_path + "/" + dir_path;
// Check if the directory exists (if it does not then create it)
try {
fs.statSync(current_path);
} catch(err) {
fs.mkdirSync(current_path, 16877);
}
}
// Write the file to disk
current_path = dir + "/" + file;
// Append the entry to the file
fs.writeFile(current_path, content, callback);
}

188
node_modules/git/lib/git/git_index.js generated vendored Normal file
View File

@ -0,0 +1,188 @@
var util = require('util'),
Tree = require('./tree').Tree,
Config = require('./config').Config;
var GitIndex = exports.GitIndex = function(repo) {
var _repo = repo, _tree = {}, _current_tree = null;
Object.defineProperty(this, "tree", { get: function() { return _tree; }, set: function(value) { _tree = value; }, enumerable: true});
Object.defineProperty(this, "current_tree", { get: function() { return _current_tree; }, set: function(value) { _current_tree = value; }, enumerable: true});
Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: true});
}
// Sets the current tree
// +tree+ the branch/tag/sha... to use - a string
//
// Returns index (self)
GitIndex.prototype.read_tree = function(tree, callback) {
var self = this;
// Load the tree
this.repo.tree(tree, function(err, loaded_tree) {
if(err) return callback(err, loaded_tree);
self.current_tree = loaded_tree;
callback(null, loaded_tree);
})
}
// Add a file to the index
// +path+ is the path (including filename)
// +data+ is the binary contents of the file
//
// Returns nothing
GitIndex.prototype.add = function(file_path, data) {
var path = file_path.split('/');
var filename = path.pop();
var current = this.tree;
path.forEach(function(dir) {
current[dir] = current[dir] || {};
var node = current[dir];
current = node;
});
current[filename] = data;
}
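// Example: add('lib/foo.js', 'var foo = 1;') stores the contents under tree['lib']['foo.js'] until the next commit writes the tree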
// Commit the contents of the index
// +message+ is the commit message [nil]
// +parents+ is one or more commits to attach this commit to to form a new head [nil]
// +actor+ is the details of the user making the commit [nil]
// +last_tree+ is a tree to compare with - to avoid making empty commits [nil]
// +head+ is the branch to write this head to [master]
//
// Returns a String of the SHA1 of the commit
GitIndex.prototype.commit = function(message, parents, actor, last_tree, head, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
callback = args.pop();
// Set variables to default values
parents = args.length ? args.shift() : null;
actor = args.length ? args.shift() : null;
last_tree = args.length ? args.shift() : null;
head = args.length ? args.shift() : 'master';
this.write_tree(this.tree, this.current_tree, function(err, tree_sha1) {
if(tree_sha1 == last_tree) return callback(null, false); // Don't write identical commits
var contents = [];
// Add tree to contents
contents.push(['tree', tree_sha1].join(' '));
// Add all parents if they exist
if(parents) {
parents.forEach(function(p) {
if(p) contents.push(['parent', p].join(' '));
});
}
// Define name and email
var name = null, email = null;
if(actor) {
name = actor.name;
email = actor.email;
} else {
var config = new Config(self.repo);
name = config['user.name'];
email = config['user.email'];
}
var author_string = "" + name + " <" + email + "> " + parseInt(new Date().getTime()/1000) + " -0700"; // TODO must fix
contents.push(['author', author_string].join(' '));
contents.push(['committer', author_string].join(' '));
contents.push('');
contents.push(message);
// Write commit and update reference tree
self.repo.git.put_raw_object(contents.join("\n"), 'commit', function (commit_sha1) {
self.repo.update_ref(head, commit_sha1, callback);
});
});
}
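// Pack a 40-character hex sha1 string into its 20-byte binary representation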
var to_bin = function(sha1o) {
var sha1 = '';
for(var i = 0; i < sha1o.length; i = i + 2) {
sha1 = sha1 + String.fromCharCode(parseInt(sha1o.substr(i, 2), 16));
}
return sha1;
}
// Recursively write a tree to the index
// +tree+ is the tree
//
// Returns the SHA1 String of the tree
GitIndex.prototype.write_tree = function(tree, now_tree, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
callback = args.pop();
// Set variables to default values
now_tree = args.length ? args.shift() : null;
// Holds the tree content
var tree_contents = {};
// Fill in the original tree
if(now_tree) {
now_tree.contents.forEach(function(obj) {
var sha = to_bin(obj.id);
var k = obj.name;
if(obj instanceof Tree) k = k + '/';
tree_contents[k] = "" + obj.mode.toString() + " " + obj.name + "\0" + sha;
});
}
// overwrite with the new tree contents
Object.keys(tree).forEach(function(key) {
var value = tree[key];
if(value.constructor == String) {
var sha = self.write_blob(value);
sha = to_bin(sha);
tree_contents[key] = "" + '100644' + " " + key + '\0' + sha;
} else if(Object.prototype.toString.call(value) === '[object Object]') {
var ctree = now_tree ? (now_tree.find(key)) : null;
// Write the next level
self.write_tree(value, ctree, function(err, sha) {
var sha = to_bin(sha);
tree_contents[key + '/'] = "" + '040000' + " " + key + '\0' + sha;
});
}
})
var tr = Object.keys(tree_contents).sort().map(function(key) {
return tree_contents[key];
}).join('');
// Return the object sha
this.repo.git.put_raw_object(tr, 'tree', function (sha1) {
callback(null, sha1);
})
}
// Write the blob to the index
// +data+ is the data to write
//
// Returns the SHA1 String of the blob
GitIndex.prototype.write_blob = function(data) {
return this.repo.git.put_raw_object(data, 'blob');
}

20
node_modules/git/lib/git/git_object.js generated vendored Normal file
View File

@ -0,0 +1,20 @@
var GitCommit = require('./internal/git_commit').GitCommit,
GitTree = require('./internal/git_tree').GitTree,
GitTag = require('./internal/git_tag').GitTag,
GitBlob = require('./internal/git_blob').GitBlob;
var GitObject = exports.GitObject = function() {}
GitObject.from_raw = function(raw_object, repository) {
if(raw_object.type == "blob") {
return GitBlob.from_raw(raw_object, repository);
} else if(raw_object.type == "tree") {
return GitTree.from_raw(raw_object, repository);
} else if(raw_object.type == "commit") {
return GitCommit.from_raw(raw_object, repository);
} else if(raw_object.type == "tag") {
return GitTag.from_raw(raw_object, repository);
} else {
throw "got invalid object-type";
}
}

54
node_modules/git/lib/git/head.js generated vendored Normal file
View File

@ -0,0 +1,54 @@
var util = require('util'),
Commit = require('./commit').Commit;
var Head = exports.Head = function(name, commit) {
var _name = name;
var _commit = commit;
// Define the properties
Object.defineProperty(this, "name", { get: function() { return _name; }, enumerable: true});
Object.defineProperty(this, "commit", { get: function() { return _commit; }, enumerable: true});
}
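// Map a reference type to its refs directory, e.g. prefix('head') returns "refs/heads"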
var prefix = function(name) {
return "refs/" + name + "s";
}
Head.current = function(repo, options, callback) {
var args = Array.prototype.slice.call(arguments, 1);
callback = args.pop();
options = args.length ? args.shift() : {};
// Let's read the head
repo.git.fs_read('HEAD', function(err, head) {
if(err) return callback(err, head);
var matches = head.toString().match(/ref: refs\/heads\/(.*)/);
if(!matches) return callback(null, null);
// we have a correct reference, create a new head reference
repo.git.rev_parse(options, 'HEAD', 0, function(err, rev) {
if(err) return callback(err, rev);
return callback(null, new Head(matches[1], rev));
});
});
}
Head.find_all = function(repo, options, callback) {
var args = Array.prototype.slice.call(arguments, 1);
callback = args.pop();
options = args.length ? args.shift() : {};
// Let's fetch the references
repo.git.refs({}, prefix('head'), function(err, refs) {
if(err) return callback(err, refs);
// Map the references
var mapped_refs = refs.split(/\n/).map(function(ref) {
// Fetch the name and id for the reference
var split_reference = ref.split(/ /);
var name = split_reference[0];
var id = split_reference[1];
// Create a commit object with the id
var commit = new Commit(repo, id);
// Wrap the commit object in a head object and return mapped object
return new Head(name, commit);
})
callback(null, mapped_refs);
})
}

36
node_modules/git/lib/git/index.js generated vendored Normal file
View File

@ -0,0 +1,36 @@
[
'actor', 'binary_parser', 'blame', 'blame_line', 'blob', 'commit', 'commit_stats', 'config',
'diff', 'file_index', 'file_window', 'git', 'git_file_operations', 'git_index', 'git_object',
'head', 'loose_storage', 'merge', 'pack_storage', 'raw_object', 'ref', 'remote', 'repo', 'repository',
'status', 'status_file', 'sub_module', 'tag', 'tree', 'user_info',
'internal/directory_entry', 'internal/git_blob', 'internal/git_commit', 'internal/git_tag', 'internal/git_tree'
].forEach(function(path){
var module = require('./' + path);
for (var i in module)
exports[i] = module[i];
});
[
'zlib'
].forEach(function(path){
var module = require('../zlib/' + path);
for (var i in module)
exports[i] = module[i];
});
[
'sprintf'
].forEach(function(path){
var module = require('../sprintf/' + path);
for (var i in module)
exports[i] = module[i];
});
[
'block', 'callbacks', 'change', 'diff', 'hunk'
].forEach(function(path){
var module = require('../diff/' + path);
for (var i in module)
exports[i] = module[i];
});

60
node_modules/git/lib/git/internal/directory_entry.js generated vendored Normal file
View File

@ -0,0 +1,60 @@
var StringUtil = require('../../sprintf/sprintf').StringUtil;
var S_IFMT = parseInt('00170000', 8);
var S_IFLNK = parseInt('0120000', 8);
var S_IFREG = parseInt('0100000', 8);
var S_IFDIR = parseInt('0040000', 8);
var S_IFGITLINK = parseInt('0160000', 8);
var DirectoryEntry = exports.DirectoryEntry = function(mode, file_name, sha1) {
var _mode = 0, _name = file_name, _sha1 = sha1;
var self = this;
// Process the mode to correctly get the right value
for(var i = 0; i < mode.length; i++) {
_mode = (_mode << 3) | (mode.charCodeAt(i) - '0'.charCodeAt(0));
}
// Internal properties
Object.defineProperty(this, "mode", { get: function() { return _mode; }, set: function(value) { _mode = value; }, enumerable: true});
Object.defineProperty(this, "name", { get: function() { return _name; }, set: function(value) { _name = value; }, enumerable: true});
Object.defineProperty(this, "sha1", { get: function() { return _sha1; }, set: function(value) { _sha1 = value; }, enumerable: true});
// Return the type of entry
Object.defineProperty(this, "type", { get: function() {
var type = self.mode & S_IFMT;
if(type == S_IFGITLINK) {
return 'submodule';
} else if(type == S_IFLNK) {
return 'link';
} else if(type == S_IFDIR) {
return 'directory';
} else if(type == S_IFREG) {
return 'file';
} else {
return null;
}
}, enumerable: true});
Object.defineProperty(this, "format_type", { get:function() {
var type = this.type;
if(type == 'link') {
return 'link';
} else if(type == 'directory') {
return 'tree';
} else if(type == 'file') {
return 'blob';
} else if(type == 'submodule') {
return 'commit';
}
}, enumerable: false});
Object.defineProperty(this, "format_mode", { get:function() {
return StringUtil.sprintf("%06o", _mode);
}, enumerable: false});
// Ensure we don't have an illegal type of directory
if([S_IFLNK, S_IFDIR, S_IFREG, S_IFGITLINK].indexOf(_mode & S_IFMT) == -1) {
throw "unknown type for directory entry";
}
}

14
node_modules/git/lib/git/internal/git_blob.js generated vendored Normal file
View File

@ -0,0 +1,14 @@
var util = require('util');
var GitBlob = exports.GitBlob = function(content, repository) {
var _content = content, _repository = repository;
Object.defineProperty(this, "content", { get: function() { return _content; }, set: function(value) { _content = value; }, enumerable: true});
Object.defineProperty(this, "repository", { get: function() { return _repository; }, set: function(value) { _repository = value; }, enumerable: false});
Object.defineProperty(this, "type", { get: function() { return "blob"; }, enumerable: true});
Object.defineProperty(this, "raw_content", { get: function() { return _content; }, set: function(value) { _content = value; }, enumerable: true});
}
GitBlob.from_raw = function(raw_object, repository) {
return new GitBlob(raw_object.content);
}

87
node_modules/git/lib/git/internal/git_commit.js generated vendored Normal file
View File

@ -0,0 +1,87 @@
var util = require('util'),
UserInfo = require('../user_info').UserInfo;
var GitCommit = exports.GitCommit = function(tree, parent, author, committer, message, headers, repository) {
var _tree = tree, _parent = parent, _author = author, _committer = committer, _message = message, _headers = headers, _repository = repository;
Object.defineProperty(this, "tree", { get: function() { return _tree; }, enumerable: true});
Object.defineProperty(this, "parent", { get: function() { return _parent; }, enumerable: true});
Object.defineProperty(this, "author", { get: function() { return _author; }, enumerable: true});
Object.defineProperty(this, "committer", { get: function() { return _committer; }, enumerable: true});
Object.defineProperty(this, "message", { get: function() { return _message; }, enumerable: true});
Object.defineProperty(this, "headers", { get: function() { return _headers; }, enumerable: true});
Object.defineProperty(this, "repository", { get: function() { return _repository; }, enumerable: true});
Object.defineProperty(this, "type", { get: function() { return "commit"; }, enumerable: true});
// Raw content of commit
Object.defineProperty(this, "raw_content", { get: function() {
return "tree " + _tree + "\n"
+ _parent.map(function(i) { return "parent " + i +"\n"; }).join('')
+ "author " + _author + "\ncommitter " + _committer + "\n\n" + _message;
}, enumerable: true});
}
// Create a commit from a raw object
GitCommit.from_raw = function(raw_object, repository) {
var parent = [];
var tree = null, author = null, committer = null;
// Split the text but only grab the 2 first blocks
var split_result = raw_object.content.split(/\n\n/);
var headers = split_result.shift();
var message = split_result.join("\n\n");
// get all the headers
var all_headers = headers.split(/\n/).map(function(header) {
var parts = header.split(/ /);
return [parts.shift(), parts.join(" ")];
})
// Iterate over all the headers
all_headers.forEach(function(header) {
var key = header[0];
var value = header[1];
if(key == "tree") {
tree = value;
} else if(key == "parent") {
parent.push(value);
} else if(key == "author") {
author = new UserInfo(value);
} else if(key == "committer") {
committer = new UserInfo(value);
} else {
// Unknown header
util.puts("unknown header '" + key + "' in commit " + raw_object.sha_hex())
}
})
if(!tree && !author && !committer) {
throw "incomplete raw commit object";
}
// Return the git commit object
return new GitCommit(tree, parent, author, committer, message, headers, repository);
}
GitCommit.prototype.raw_log = function(sha1) {
var output = "commit " + sha1 + "\n";
output = output + this.headers + "\n\n";
var lines = this.message.split("\n");
// Remove the last line which will be empty
for(var i = 0; i < (lines.length > 1 ? lines.length - 1 : lines.length); i++) {
output = output + ' ' + lines[i] + '\n';
}
// Return the output
return output + '\n';
}

69
node_modules/git/lib/git/internal/git_tag.js generated vendored Normal file
View File

@ -0,0 +1,69 @@
var util = require('util'),
UserInfo = require('../user_info').UserInfo;
var GitTag = exports.GitTag = function(object, type, tag, tagger, message, repository) {
var _object = object, _type = type, _tag = tag, _tagger = tagger, _message = message, _repository = repository;
Object.defineProperty(this, "repository", { get: function() { return _repository; }, set: function(value) { _repository = value; }, enumerable: true});
Object.defineProperty(this, "object", { get: function() { return _object; }, set: function(value) { _object = value; }, enumerable: true});
// Object.defineProperty(this, "type", { get: function() { return _type; }, set: function(value) { _type = value; }, enumerable: true});
Object.defineProperty(this, "tag", { get: function() { return _tag; }, set: function(value) { _tag = value; }, enumerable: true});
Object.defineProperty(this, "tagger", { get: function() { return _tagger; }, set: function(value) { _tagger = value; }, enumerable: true});
Object.defineProperty(this, "message", { get: function() { return _message; }, set: function(value) { _message = value; }, enumerable: true});
Object.defineProperty(this, "type", { get: function() { return "tag"; }, enumerable: true});
Object.defineProperty(this, "raw_content", { get: function() {
return "object " + _object + "\ntype " + _type + "\ntag " + _tag + "\ntagger " + _tagger + " " + _message + "\n\n";
}, enumerable: false});
}
GitTag.from_raw = function(raw_object, repository) {
var parts = raw_object.content.split("\n\n");
var headers = parts.shift();
var message = parts.join("\n\n");
// Further split the headers
headers = headers.split(/\n/).map(function(header) {
var parts = header.split(/ /);
return [parts.shift(), parts.join(" ")];
})
// Initialize base variables
var object = '', type = '', tag = '', tagger = '';
headers.forEach(function(header) {
var key = header[0];
var value = header[1];
if(key == 'object') {
object = value;
} else if (key == 'type') {
if(['blob', 'tree', 'commit', 'tag'].indexOf(value) == -1) {
throw "invalid type in tag";
}
// Set the type
type = value;
} else if(key == 'tag') {
tag = value;
} else if(key == 'tagger') {
tagger = new UserInfo(value);
} else {
util.puts("unknown header '" + key);
}
});
// If we have an illegal tag object
if(!object || !type || !tag || !tagger) {
throw "incomplete raw tag object";
}
// Return the tag
return new GitTag(object, type, tag, tagger, message, repository);
}
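A similar sketch for GitTag.from_raw; the payload below (target sha, tag name, tagger line) is made up for illustration and again assumes UserInfo parses the standard tagger format.
// Hypothetical annotated-tag payload
var raw_tag = {
  content: "object 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n"
    + "type commit\n"
    + "tag v0.1.0\n"
    + "tagger A U Thor <author@example.com> 1200000000 +0000\n"
    + "\nfirst release\n"
};
var tag = GitTag.from_raw(raw_tag, null);
// tag.tag is "v0.1.0", tag.object is the target sha and tag.message holds the annotation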

70
node_modules/git/lib/git/internal/git_tree.js generated vendored Normal file
View File

@ -0,0 +1,70 @@
var util = require('util'),
BinaryParser = require('../binary_parser').BinaryParser,
DirectoryEntry = require('../internal/directory_entry').DirectoryEntry;
var GitTree = exports.GitTree = function(entries, repository) {
var _entries = entries ? entries : [], _repository = repository;
// Internal properties
Object.defineProperty(this, "entries", { get: function() { return _entries; }, set: function(value) { _entries = value; }, enumerable: true});
Object.defineProperty(this, "repository", { get: function() { return _repository; }, set: function(value) { _repository = value; }, enumerable: true});
Object.defineProperty(this, "type", { get: function() { return "tree"; }, enumerable: true});
// Raw content of commit
Object.defineProperty(this, "raw_content", { get: function() {
return _entries.map(function(e) {
return [[e.format_mode, e.format_type, e.sha1].join(' '), e.name].join('\t')
}).join('\n');
}, enumerable: true});
}
var read_until_chr = function(index, content, char) {
var found = false;
var content_length = content.length;
var chr_code = char.charCodeAt(0);
var offset = 0;
// Search until we locate the content
while(!found && (index + offset) < content_length) {
if(content.charCodeAt(index + offset) == chr_code) found = true;
offset = offset + 1;
}
// Extract content and return
return content.substr(index, offset - 1);
}
var to_hex_string = function(string) {
var hexString = '';
for(var index = 0; index < string.length; index++) {
var value = BinaryParser.toByte(string.substr(index, 1));
var number = value <= 15 ? "0" + value.toString(16) : value.toString(16);
hexString = hexString + number;
}
return hexString;
};
// Create Tree Object from Raw stream
GitTree.from_raw = function(raw_object, repository) {
var entries = [];
var index = 0;
var content_length = raw_object.content.length;
var content = raw_object.content;
// Parse the content
while(index < content_length) {
var mode = read_until_chr(index, content, ' ');
index = index + mode.length + 1;
var file_name = read_until_chr(index, content, '\0');
index = index + file_name.length + 1;
var raw_sha = content.substr(index, 20);
index = index + raw_sha.length;
var sha = to_hex_string(raw_sha);
// Add the Entry to the directory list
entries.push(new DirectoryEntry(mode, file_name, sha));
}
// Return a tree with all the entries
return new GitTree(entries, repository);
}
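The on-disk tree format that from_raw walks is a sequence of "<mode> <name>\0" headers, each followed by 20 raw sha bytes. A minimal sketch, assuming BinaryParser.toByte returns a character's byte value:
// Build a hypothetical single-entry tree payload
var raw_sha = '';
for(var i = 0; i < 20; i++) raw_sha = raw_sha + String.fromCharCode(i);  // placeholder sha bytes
var raw_tree = { content: "100644 README\0" + raw_sha };
var tree = GitTree.from_raw(raw_tree, null);
// tree.entries[0] should be a DirectoryEntry with name "README" and
// sha1 "000102030405060708090a0b0c0d0e0f10111213"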

172
node_modules/git/lib/git/loose_storage.js generated vendored Normal file
View File

@ -0,0 +1,172 @@
var util = require('util'),
fs = require('fs'),
BinaryParser = require('./binary_parser').BinaryParser,
Zlib = require('../zlib/zlib').Zlib,
RawObject = require('./raw_object').RawObject,
crypto = require('crypto'),
zlib = require('zlib');
var OBJ_TYPES = [null, "commit", "tree", "blob", "tag"];
LooseStorage = exports.LooseStorage = function(directory) {
var _directory = directory;
Object.defineProperty(this, "directory", { get: function() { return _directory; }, set: function(value) { _directory = value; }, enumerable: true});
}
LooseStorage.prototype.find = function(sha1) {
try {
sha1 = to_hex_string(sha1);
// If we don't have a valid sha
if(sha1.length != 40) return null;
// Directory path
var path = this.directory + "/" + sha1.substring(0, 2) + '/' + sha1.substring(2, 40);
return this.get_raw_object(fs.readFileSync(path));
} catch(err) {
return null;
}
}
// Read and parse the raw object
LooseStorage.prototype.get_raw_object = function(buf) {
if(buf.length < 2) throw "object file too small";
// Set up variables
var type = null;
var size = null;
var used = null;
var content = null;
if(this.is_legacy_loose_object(buf)) {
content = new Zlib.Unzip(buf).unzip();
content = Array.isArray(content) ? content[0] : content;
// Let's split the content up
var parts = content.split(/\0/)
var header = parts.shift();
content = parts.join("\0");
// If either the header or the content is missing, the object header is invalid
if(header == null || content == null) throw "invalid object header";
// Split out the header
parts = header.split(/ /);
type = parts[0];
size = parts[1];
// Check that we have a valid type
if(['blob', 'tree', 'commit', 'tag'].indexOf(type) == -1 || !size.match(/^\d+$/)) throw "invalid object header";
// Convert parts
size = parseInt(size, 10);
} else {
var parts = this.unpack_object_header_gently(buf);
type = parts[0];
size = parts[1];
used = parts[2];
// Unpack content
content = new Zlib.Unzip(buf.slice(used, buf.length)).unzip();
content = Array.isArray(content) ? content[0] : content;
}
// Return a raw object
return new RawObject(type, content);
}
LooseStorage.prototype.unpack_object_header_gently = function(buf) {
var used = 0
var c = buf[used];
used = used + 1;
var type = (c >> 4) & 7;
var size = c & 15;
var shift = 4;
while((c & 0x80) != 0) {
if(buf.length <= used) throw "object file too short";
// Get next char
c = buf[used];
used = used + 1;
// Calculate size and bump the shift for the next 7-bit group
size = size + ((c & 0x7f) << shift);
shift = shift + 7;
}
// Fetch the type
type = OBJ_TYPES[type];
// Check that we have a valid type
if(['blob', 'tree', 'commit', 'tag'].indexOf(type) == -1) throw "invalid loose object type";
return [type, size, used];
}
LooseStorage.prototype.is_legacy_loose_object = function(buf) {
var word = (buf[0] << 8) + buf[1];
return buf[0] == 0x78 && word % 31 == 0;
}
var to_hex_string = function(string) {
var hexString = '';
for(var index = 0; index < string.length; index++) {
var value = BinaryParser.toByte(string.substr(index, 1));
var number = value <= 15 ? "0" + value.toString(16) : value.toString(16);
hexString = hexString + number;
}
return hexString;
};
// Currently the legacy loose format is used because it is simpler to produce.
// Takes content and a type, writes the loose object to disk (if absent) and passes the resulting sha to the callback.
LooseStorage.prototype.put_raw_object = function(content, type, callback) {
var self = this;
// Retrieve size of message
var size = content.length.toString();
// Verify that header is ok
LooseStorage.verify_header(type, size);
// Create header
var header = "" + type + " " + size + "\0";
var store = header + content;
// Use node crypto library to create sha1 hash
var hash = crypto.createHash("sha1");
hash.update(store);
// Return the hash digest
var sha1 = hash.digest('hex');
// Create path
var path = this.directory + "/" + sha1.substr(0, 2) + '/' + sha1.substr(2);
try {
fs.statSync(path);
} catch(err) {
// Deflate the store and write it out
zlib.deflate(store, function (err, buffer) {
if (err) {
throw err;
}
// File does not exist, create the fan-out directory
fs.mkdir(self.directory + "/" + sha1.substr(0, 2), 16877, function (err) {
if (err) {
throw err;
}
fs.writeFile(path, buffer, 'binary', function (err) {
if (err) {
throw err;
}
callback(sha1);
});
});
});
}
}
LooseStorage.verify_header = function(type, size) {
if(["blob", "tree", "commit", "tag"].indexOf(type) == -1 || size.match(/^\d+$/) == null) {
throw "invalid object header";
}
}
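The legacy loose format produced above is "<type> <size>\0<content>", sha1-hashed before deflation and stored under a two-character fan-out directory. A minimal sketch of computing an object id the same way put_raw_object does, using only node's crypto module:
var crypto = require('crypto');
var content = "hello\n";
var store = "blob " + content.length + "\0" + content;
var sha1 = crypto.createHash("sha1").update(store).digest("hex");
// sha1 is the 40-character object id; the object would live at
// <directory>/<sha1 first two chars>/<sha1 remaining 38 chars>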

39
node_modules/git/lib/git/merge.js generated vendored Normal file
View File

@ -0,0 +1,39 @@
var Merge = exports.Merge = function(str) {
var _conflicts = 0, _text = {}, _sections = null;
var section = 0;
var status = Merge.STATUS_BOTH;
Object.defineProperty(this, "conflicts", { get: function() { return _conflicts; }, set: function(value) { _conflicts = value; }, enumerable: true});
Object.defineProperty(this, "text", { get: function() { return _text; }, set: function(value) { _text = value; }, enumerable: true});
Object.defineProperty(this, "sections", { get: function() { return _sections; }, set: function(value) { _sections = value; }, enumerable: true});
var lines = str.split("\n");
lines.forEach(function(line) {
if(line.match(/^<<<<<<< (.*?)/)) {
status = Merge.STATUS_OURS;
_conflicts = _conflicts + 1;
section = section + 1;
} else if(line == '=======') {
status = Merge.STATUS_THEIRS;
} else if(line.match(/^>>>>>>> (.*?)/)) {
status = Merge.STATUS_BOTH;
section = section + 1;
} else {
_text[section] = _text[section] == null ? {} : _text[section];
_text[section][status] = _text[section][status] == null ? [] : _text[section][status];
_text[section][status].push(line);
}
});
// Let's set the values
_text = Object.keys(_text).map(function(key) {
return _text[key];
});
// Set the number of sections
_sections = _text.length;
}
// Merge status constants
Merge.STATUS_BOTH = 'both';
Merge.STATUS_OURS = 'ours';
Merge.STATUS_THEIRS = 'theirs';
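A small sketch of how the Merge parser above behaves on a conflicted buffer; the file content is made up for illustration.
var conflicted = [
  "line before",
  "<<<<<<< HEAD",
  "our change",
  "=======",
  "their change",
  ">>>>>>> branch",
  "line after"
].join("\n");
var merge = new Merge(conflicted);
// merge.conflicts is 1, merge.sections is 3 and
// merge.text[1] is { ours: ["our change"], theirs: ["their change"] }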

Some files were not shown because too many files have changed in this diff.